def clone(self):
    """Clone all pending errata into the destination channel.

    Clones in bunches of ``self.bunch_size`` advisories per API call,
    driving a progress bar, then optionally re-synchronizes the cloned
    errata with their originals (unless ``self.no_errata_sync`` is set).
    """
    errata_ids = [e["advisory_name"] for e in self.errata_to_clone]
    # Nothing to do for an empty errata set.
    if not errata_ids:
        return

    msg = 'Cloning Errata into %s (%i):' % (self.to_label, len(errata_ids))
    # print() function instead of the Python-2-only print statement,
    # consistent with the other clone() variant in this file.
    print(msg)
    log_clean(0, "")
    log_clean(0, msg)
    for e in self.errata_to_clone:
        log_clean(0, "%s - %s" % (e['advisory_name'], e['synopsis']))

    pb = ProgressBar(prompt="", endTag=' - complete',
                     finalSize=len(errata_ids), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    # Clone in bunches so a single XML-RPC call stays reasonably sized.
    while errata_ids:
        errata_set = errata_ids[:self.bunch_size]
        del errata_ids[:self.bunch_size]
        self.remote_api.clone_errata(self.to_label, errata_set)
        pb.addTo(self.bunch_size)
        pb.printIncrement()
    self.reset_new_pkgs()
    pb.printComplete()

    if not self.no_errata_sync:
        log_clean(0, "")
        log_clean(0, "Synchronizing Errata in %s with originals"
                  % self.to_label)
        self.remote_api.sync_errata(self.to_label)
def clone(self):
    """Clone every pending erratum into the destination channel.

    Advisories are cloned in bunches of ``self.bunch_size`` per API
    call while a progress bar tracks completion; afterwards the clones
    are optionally re-synchronized with their originals.
    """
    pending = [erratum["advisory_name"] for erratum in self.errata_to_clone]
    if not pending:
        return

    msg = 'Cloning Errata into %s (%i):' % (self.to_label, len(pending))
    print(msg)
    log_clean(0, "")
    log_clean(0, msg)
    # Log the advisories in a stable, sorted order for readability.
    for erratum in sorted(self.errata_to_clone,
                          key=lambda x: x['advisory_name']):
        log_clean(0, "%s - %s" % (erratum['advisory_name'],
                                  erratum['synopsis']))

    progress = ProgressBar(prompt="", endTag=' - complete',
                           finalSize=len(pending), finalBarLength=40,
                           stream=sys.stdout)
    progress.printAll(1)
    # Work through the queue one bunch at a time.
    while pending:
        batch, pending = pending[:self.bunch_size], pending[self.bunch_size:]
        self.remote_api.clone_errata(self.to_label, batch)
        progress.addTo(self.bunch_size)
        progress.printIncrement()
    self.reset_new_pkgs()
    progress.printComplete()

    if not self.no_errata_sync:
        log_clean(0, "")
        log_clean(0, "Synchronizing Errata in %s with originals"
                  % self.to_label)
        self.remote_api.sync_errata(self.to_label)
def log_error(msg):
    """Log *msg* tagged with the calling frame, and echo it to stderr."""
    # [-2] is the caller of log_error: (filename, lineno, funcname, text).
    caller = traceback.extract_stack()[-2]
    entry = "{0}: {1}.{2}({3}) - {4}".format(
        log_time(), caller[0], caller[2], caller[1], msg)
    log_clean(0, entry)
    sys.stderr.write("{0}\n".format(msg))
def _init_root(self, root):
    """
    Creates a root environment for Zypper, but only if none is around.

    :return: None
    """
    required_dirs = (root,
                     os.path.join(root, "etc/zypp/repos.d"),
                     REPOSYNC_ZYPPER_ROOT)
    try:
        for directory in required_dirs:
            if not os.path.exists(directory):
                os.makedirs(directory)
    except Exception as exc:
        # Directory creation failed: log, report and re-raise — a Zypper
        # root we cannot create is fatal for the sync.
        msg = "Unable to initialise Zypper root for {}: {}".format(root, exc)
        rhnLog.log_clean(0, msg)
        sys.stderr.write(str(msg) + "\n")
        raise

    try:
        # Synchronize new GPG keys that come from the Spacewalk GPG keyring
        self.__synchronize_gpg_keys()
    except Exception as exc:
        # Keyring sync failure is reported but deliberately not fatal.
        msg = "Unable to synchronize Spacewalk GPG keyring: {}".format(exc)
        rhnLog.log_clean(0, msg)
        sys.stderr.write(str(msg) + "\n")
def log(msg, level=0):
    """Log *msg* tagged with the calling frame; echo to stdout when level < 1."""
    # [-2] is the caller of log: (filename, lineno, funcname, text).
    caller = traceback.extract_stack()[-2]
    entry = "{0}: {1}.{2}({3}) - {4}".format(
        log_time(), caller[0], caller[2], caller[1], msg)
    log_clean(level, entry)
    if level < 1:
        sys.stdout.write("{0}\n".format(msg))
def clone_channel(self, original_label, new_label, parent):
    """Clone *original_label* into *new_label* with its original package set.

    The clone's name, label and summary all default to *new_label*; a
    non-empty *parent* makes the clone a child channel.
    """
    self.auth_check()
    details = {key: new_label for key in ('name', 'label', 'summary')}
    if parent and parent != '':
        details['parent_label'] = parent
    msg = "Cloning %s to %s with original package set." % (original_label,
                                                           new_label)
    log_clean(0, "")
    log_clean(0, msg)
    print(msg)
    self.client.channel.software.clone(self.auth_token, original_label,
                                       details, True)
def process_deps(self, needed_pkgs):
    """Pull dependency packages into the destination channel.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    Dependencies found in the source are added either by cloning the
    erratum that provides them (in bunches of self.bunch_size) or, when
    no providing erratum is known, directly by package id.  Returns the
    list of nvrea names that were found.
    """
    needed_ids = []
    needed_names = []
    unsolved_deps = []  # deps with no source package; collected but not used here
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.append(found['nvrea'])
        else:
            unsolved_deps.append(pkg)

    needed_errata = []       # advisories to clone instead of raw packages
    still_needed_pids = []   # package ids with no providing erratum
    for pid in needed_ids:
        # self.original_pid_errata_map caches pid -> advisory lookups
        # across calls so we never query the same pid twice.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    self.original_pid_errata_map[pid] = \
                        erratum['advisory']
                    break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
        # NOTE(review): 'is not None' would be the idiomatic comparison
        if self.original_pid_errata_map[pid] != None:
            needed_errata.append(self.original_pid_errata_map[pid])
        else:
            still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    for name in needed_names:
        log_clean(0, name)

    if len(needed_errata) > 0:
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s" %
                  (len(needed_errata), self.to_label))
        # Clone in bunches to keep each API call small.
        while (len(needed_errata) > 0):
            errata_set = needed_errata[:self.bunch_size]
            del needed_errata[:self.bunch_size]
            if self.detached:
                # detached mode: fire-and-forget asynchronous clone
                self.remote_api.clone_errata_async(self.to_label, errata_set)
            else:
                self.remote_api.clone_errata(self.to_label, errata_set)

    if len(needed_ids) > 0:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s" %
                  (len(needed_ids), self.to_label))
        self.remote_api.add_packages(self.to_label, needed_ids)
    self.reset_new_pkgs()
    return needed_names
def process_deps(self, needed_pkgs):
    """Pull dependency packages into the destination channel.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    Found dependencies are added by cloning their providing erratum
    when one is known (in bunches of self.bunch_size), otherwise by
    raw package id.  Returns the nvrea names that were found.
    """
    needed_ids = []
    needed_names = []
    unsolved_deps = []  # deps without a source package; not used further here
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.append(found['nvrea'])
        else:
            unsolved_deps.append(pkg)

    needed_errata = []       # advisories providing the found packages
    still_needed_pids = []   # ids that must be added directly
    for pid in needed_ids:
        # pid -> advisory results are cached in original_pid_errata_map.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    self.original_pid_errata_map[pid] = \
                        erratum['advisory']
                    break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
        # NOTE(review): 'is not None' would be the idiomatic comparison
        if self.original_pid_errata_map[pid] != None:
            needed_errata.append(self.original_pid_errata_map[pid])
        else:
            still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    for name in needed_names:
        log_clean(0, name)

    if len(needed_errata) > 0:
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s" %
                  (len(needed_errata), self.to_label))
        # Batch the clones so a single call stays small.
        while (len(needed_errata) > 0):
            errata_set = needed_errata[:self.bunch_size]
            del needed_errata[:self.bunch_size]
            if self.detached:
                # asynchronous clone when running detached
                self.remote_api.clone_errata_async(self.to_label, errata_set)
            else:
                self.remote_api.clone_errata(self.to_label, errata_set)

    if len(needed_ids) > 0:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s" %
                  (len(needed_ids), self.to_label))
        self.remote_api.add_packages(self.to_label, needed_ids)
    self.reset_new_pkgs()
    return needed_names
def log2disk(level, msg, cleanYN=0, notimeYN=0):
    """Log to a log file.
    Arguments: see def _prepLogMsg(...) above.

    msg may be a single message or a list of messages; each entry is
    written via log_clean().  KeyboardInterrupt/SystemExit propagate;
    any other write failure is reported on stderr and swallowed so
    logging can never kill the caller.
    """
    # isinstance instead of type comparison; accepts list subclasses too.
    if not isinstance(msg, list):
        msg = [msg]
    for m in msg:
        try:
            log_clean(level=level, msg=_prepLogMsg(m, cleanYN, notimeYN))
        except (KeyboardInterrupt, SystemExit):
            raise
        # 'except Exception, e' is Python-2-only syntax (SyntaxError on
        # Python 3); use the version-neutral sys.exc_info() form instead,
        # matching the corrected log2disk variant in this file.
        except Exception:  # pylint: disable=E0012, W0703
            e = sys.exc_info()[1]
            sys.stderr.write("ERROR: upon attempt to write to log file: %s" % e)
def clone_channel(self, original_label, new_label, parent):
    """Clone *original_label* as *new_label*, keeping the original packages.

    Name, label and summary of the clone are all *new_label*; a
    non-empty *parent* turns the clone into a child channel.
    """
    self.auth_check()
    details = dict.fromkeys(('name', 'label', 'summary'), new_label)
    if parent and parent != '':
        details['parent_label'] = parent
    message = "Cloning %s to %s with original package set." % (original_label,
                                                               new_label)
    log_clean(0, "")
    log_clean(0, message)
    print(message)
    self.client.channel.software.clone(self.auth_token, original_label,
                                       details, True)
def log2disk(level, msg, cleanYN=0, notimeYN=0):
    """Log to a log file.
    Arguments: see def _prepLogMsg(...) above.

    Accepts a single message or a list; each entry is written through
    log_clean().  KeyboardInterrupt/SystemExit propagate, any other
    failure is reported on stderr and swallowed.
    """
    # isinstance instead of type() comparison.
    if not isinstance(msg, list):
        msg = [msg]
    for m in msg:
        try:
            log_clean(level=level, msg=_prepLogMsg(m, cleanYN, notimeYN))
        except (KeyboardInterrupt, SystemExit):
            raise
        # 'except Exception, e' is a SyntaxError on Python 3; use the
        # version-neutral sys.exc_info() form as in the fixed variant.
        except Exception:  # pylint: disable=E0012, W0703
            e = sys.exc_info()[1]
            sys.stderr.write('ERROR: upon attempt to write to log file: %s' % e)
def log2disk(level, msg, cleanYN=0, notimeYN=0):
    """Log to a log file.
    Arguments: see def _prepLogMsg(...) above.

    msg may be one message or a list of messages.  Write failures other
    than KeyboardInterrupt/SystemExit are reported on stderr and
    swallowed so logging never crashes the caller.
    """
    if not isinstance(msg, type([])):
        msg = [msg]
    for m in msg:
        try:
            log_clean(level=level, msg=_prepLogMsg(m, cleanYN, notimeYN))
        except (KeyboardInterrupt, SystemExit):
            raise
        # 'except Exception, e' is Python-2-only syntax (SyntaxError on
        # Python 3); bind the exception via sys.exc_info() instead,
        # matching the corrected variant of this function in the file.
        except Exception:  # pylint: disable=E0012, W0703
            e = sys.exc_info()[1]
            sys.stderr.write('ERROR: upon attempt to write to log file: %s' % e)
def process_deps(self, needed_pkgs):
    """Pull dependency packages into the destination channel, honoring
    the to_date cutoff.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    A providing erratum is only accepted when its date field (named by
    self.use_update_date) is on or before self.to_date.  Accepted
    errata are cloned in bunches; remaining packages are added by id.
    Returns the nvrea names that were found.
    """
    needed_ids = []
    needed_names = []
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.append(found['nvrea'])

    needed_errata = []       # advisories to clone instead of raw packages
    still_needed_pids = []   # ids with no (date-acceptable) erratum
    for pid in needed_ids:
        # pid -> advisory lookups are cached in original_pid_errata_map.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    # Accept only errata dated on or before the cutoff;
                    # the date string format is '%Y-%m-%d %H:%M:%S'.
                    if not self.to_date or (self.to_date and \
                            datetime.datetime(*time.strptime(erratum[self.use_update_date], '%Y-%m-%d %H:%M:%S')[0:6]).date() <= \
                            self.to_date.date()):
                        self.original_pid_errata_map[pid] = erratum[
                            'advisory']
                        break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
        # NOTE(review): 'is not None' would be the idiomatic comparison
        if self.original_pid_errata_map[pid] != None:
            needed_errata.append(self.original_pid_errata_map[pid])
        else:
            still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    for name in needed_names:
        log_clean(0, name)

    if len(needed_errata) > 0:
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s" %
                  (len(needed_errata), self.to_label))
        # Clone in bunches to keep each API call small.
        while (len(needed_errata) > 0):
            errata_set = needed_errata[:self.bunch_size]
            del needed_errata[:self.bunch_size]
            self.remote_api.clone_errata(self.to_label, errata_set)

    if len(needed_ids) > 0:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s" %
                  (len(needed_ids), self.to_label))
        self.remote_api.add_packages(self.to_label, needed_ids)
    self.reset_new_pkgs()
    return needed_names
def log2disk(level, msg, cleanYN=0, notimeYN=0):
    """Log to a log file.
    Arguments: see def _prepLogMsg(...) above.
    """
    # Normalize to a list so single messages and batches share one path.
    messages = msg if isinstance(msg, type([])) else [msg]
    for item in messages:
        try:
            log_clean(level=level, msg=_prepLogMsg(item, cleanYN, notimeYN))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:  # pylint: disable=E0012, W0703
            # Logging must never crash the caller: report and continue.
            err = sys.exc_info()[1]
            sys.stderr.write('ERROR: upon attempt to write to log file: %s' % err)
def process_deps(self, needed_pkgs):
    """Pull dependency packages into the destination channel, honoring
    the to_date cutoff.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    Providing errata are accepted only when their date field (chosen by
    self.use_update_date) is on or before self.to_date; accepted errata
    are cloned in bunches, leftovers are added by package id.  Returns
    the nvrea names that were found.
    """
    needed_ids = []
    needed_names = []
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.append(found['nvrea'])

    needed_errata = []       # advisories providing the found packages
    still_needed_pids = []   # ids that must be added directly
    for pid in needed_ids:
        # Cache pid -> advisory results across calls.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    # Date filter: erratum must not be newer than to_date.
                    if not self.to_date or (self.to_date and \
                            datetime.datetime(*time.strptime(erratum[self.use_update_date], '%Y-%m-%d %H:%M:%S')[0:6]).date() <= \
                            self.to_date.date()):
                        self.original_pid_errata_map[pid] = erratum['advisory']
                        break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
        # NOTE(review): 'is not None' would be the idiomatic comparison
        if self.original_pid_errata_map[pid] != None:
            needed_errata.append(self.original_pid_errata_map[pid])
        else:
            still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    for name in needed_names:
        log_clean(0, name)

    if len(needed_errata) > 0:
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s" %
                  (len(needed_errata), self.to_label))
        # Batch the clone calls.
        while (len(needed_errata) > 0):
            errata_set = needed_errata[:self.bunch_size]
            del needed_errata[:self.bunch_size]
            self.remote_api.clone_errata(self.to_label, errata_set)

    if len(needed_ids) > 0:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s" %
                  (len(needed_ids), self.to_label))
        self.remote_api.add_packages(self.to_label, needed_ids)
    self.reset_new_pkgs()
    return needed_names
def clone_channel(self, original_label, channel, parent):
    """Clone *original_label* using a list-style channel spec.

    channel - [label, name, summary, description]; trailing entries are
              optional and default to the label.
    parent  - non-empty string makes the clone a child of that channel.
    """
    self.auth_check()
    label = channel[0]
    details = {'name': label, 'label': label, 'summary': label}
    # Optional positional overrides: name, summary, description.
    for idx, key in ((1, 'name'), (2, 'summary'), (3, 'description')):
        if len(channel) > idx:
            details[key] = channel[idx]
    if parent and parent != '':
        details['parent_label'] = parent
    msg = "Cloning %s to %s with original package set." % (original_label,
                                                           details['label'])
    log_clean(0, "")
    log_clean(0, msg)
    print(msg)
    self.client.channel.software.clone(self.auth_token, original_label,
                                       details, True)
def __remove_packages(self, names_dict, pkg_list, name):
    """Base removal of packages
    names_dict - dict containing list of package names, with channel
                 lables as keys
    pkg_list - list of package dicts to consider
    name - name of removal 'blacklist' or 'removelist', for display
    """
    found_ids = []
    found_names = []
    if not names_dict:
        return
    full_pkgs = []
    # dict.has_key() was removed in Python 3; use the 'in' operator.
    if "ALL" in names_dict:
        full_pkgs += names_dict["ALL"]
    if self.dest_label() in names_dict:
        full_pkgs += names_dict[self.dest_label()]
    # add dollar signs to each one, other wise foo would match foobar
    reg_ex = re.compile("$|".join(full_pkgs) + '$')
    for pkg in pkg_list:
        if reg_ex.match(pkg['name']):
            found_ids.append(pkg['id'])
            found_names.append(pkg['nvrea'])

    log_clean(0, "")
    log_clean(0, "%s: Removing %i packages from %s." %
              (name, len(found_ids), self.to_label))
    log_clean(0, "\n".join(found_names))

    if len(found_ids) > 0:
        # print() function instead of the Python-2-only print statement.
        print("%s: Removing %i packages from %s" %
              (name, len(found_ids), self.to_label))
        self.remote_api.remove_packages(self.to_label, found_ids)
def __remove_packages(self, names_dict, pkg_list, name):
    """Base removal of packages
    names_dict - dict containing list of package names, with channel
                 lables as keys
    pkg_list - list of package dicts to consider
    name - name of removal 'blacklist' or 'removelist', for display
    """
    found_ids = []
    found_names = []
    if not names_dict:
        return
    full_pkgs = []
    # Use 'in' rather than dict.has_key(), which was removed in Python 3.
    if "ALL" in names_dict:
        full_pkgs += names_dict["ALL"]
    if self.dest_label() in names_dict:
        full_pkgs += names_dict[self.dest_label()]
    # add dollar signs to each one, other wise foo would match foobar
    reg_ex = re.compile("$|".join(full_pkgs) + '$')
    for pkg in pkg_list:
        if reg_ex.match(pkg['name']):
            found_ids.append(pkg['id'])
            found_names.append(pkg['nvrea'])

    log_clean(0, "")
    log_clean(0, "%s: Removing %i packages from %s." %
              (name, len(found_ids), self.to_label))
    log_clean(0, "\n".join(found_names))

    if len(found_ids) > 0:
        # print() function instead of the Python-2-only print statement.
        print("%s: Removing %i packages from %s" %
              (name, len(found_ids), self.to_label))
        self.remote_api.remove_packages(self.to_label, found_ids)
def _prep_zypp_repo_url(self, url):
    """
    Prepare the repository baseurl to use in the Zypper repo file.
    This will add the HTTP Proxy and Client certificate settings as part
    of the url parameters to be interpreted by CURL during the Zypper
    execution.

    :returns: str
    """
    ret_url = None
    query_params = {}
    # Proxy settings become CURL-style url parameters.
    if self.proxy_hostname:
        query_params['proxy'] = self.proxy_hostname
    if self.proxy_user:
        query_params['proxyuser'] = self.proxy_user
    if self.proxy_pass:
        query_params['proxypass'] = self.proxy_pass
    if self.sslcacert:
        # Since Zypper only accepts CAPATH, we need to split the
        # certificates bundle and run "c_rehash" on our custom CAPATH
        _ssl_capath = os.path.dirname(self.sslcacert)
        msg = "Preparing custom SSL CAPATH at {}".format(_ssl_capath)
        rhnLog.log_clean(0, msg)
        sys.stdout.write(str(msg) + "\n")
        # awk splits the PEM bundle into one cert.N.pem file per
        # certificate in _ssl_capath.
        # NOTE(review): os.system() with interpolated paths — shell
        # injection risk if sslcacert were attacker-controlled; confirm
        # these paths come only from trusted configuration.
        os.system(
            "awk 'BEGIN {{c=0;}} /BEGIN CERT/{{c++}} {{ print > \"{0}/cert.\" c \".pem\"}}' < {1}"
            .format(_ssl_capath, self.sslcacert))
        # NOTE(review): '2&>1 /dev/null' looks like a typo for
        # '> /dev/null 2>&1'; as written it does not redirect as intended.
        os.system("c_rehash {} 2&>1 /dev/null".format(_ssl_capath))
        query_params['ssl_capath'] = _ssl_capath
    if self.sslclientcert:
        query_params['ssl_clientcert'] = self.sslclientcert
    if self.sslclientkey:
        query_params['ssl_clientkey'] = self.sslclientkey
    # unquote() keeps the file paths readable in the generated repo file.
    new_query = unquote(urlencode(query_params, doseq=True))
    if self.authtoken:
        # URL already carries a '?<token>' query — append with '&'.
        ret_url = "{0}&{1}".format(url, new_query)
    else:
        ret_url = "{0}?{1}".format(url, new_query) if new_query else url
    return ret_url
def clone(self, skip_depsolve=False):
    """Run every channel cloner, log the package deltas, and dep-solve
    the newly-added packages unless *skip_depsolve* is set."""
    new_packages = []
    for cloner in self.cloners:
        cloner.process()
        delta = cloner.pkg_diff()
        new_packages.extend(delta)
        log_clean(0, "")
        log_clean(0, "%i packages were added to %s as a result of clone:"
                  % (len(delta), cloner.dest_label()))
        log_clean(0, "\n".join(pkg['nvrea'] for pkg in delta))

    if new_packages and not skip_depsolve:
        self.dep_solve([pkg['nvrea'] for pkg in new_packages])
def process_deps(self, needed_pkgs):
    """Add dependency packages that exist in the source channel to the
    destination channel, logging each nvrea that gets added."""
    ids_to_add = []
    names_to_add = []
    missing = []
    for dep in needed_pkgs:
        match = self.src_pkg_exist([dep])
        if match:
            ids_to_add.append(match['id'])
            names_to_add.append(match['nvrea'])
        else:
            missing.append(dep)

    if ids_to_add:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s"
                  % (len(ids_to_add), self.to_label))
        for nvrea in names_to_add:
            log_clean(0, nvrea)
        self.remote_api.add_packages(self.to_label, ids_to_add)
def process_deps(self, needed_pkgs):
    """Look up each needed dependency in the source channel and add the
    ones that exist to the destination channel."""
    resolved_ids = []
    resolved_names = []
    unresolved = []
    for candidate in needed_pkgs:
        hit = self.src_pkg_exist([candidate])
        if not hit:
            unresolved.append(candidate)
            continue
        resolved_ids.append(hit['id'])
        resolved_names.append(hit['nvrea'])

    if resolved_ids:
        log_clean(0, "")
        log_clean(0, "Adding %i needed dependencies to %s"
                  % (len(resolved_ids), self.to_label))
        for nvrea in resolved_names:
            log_clean(0, nvrea)
        self.remote_api.add_packages(self.to_label, resolved_ids)
def __remove_packages(self, names_dict, pkg_list, name):
    """Base removal of packages
    names_dict - dict containing list of package name patterns, with
                 channel lables as keys; entries are treated as regular
                 expressions (matched against the start of the package
                 name via re.match)
    pkg_list - list of package dicts to consider
    name - name of removal 'blacklist' or 'removelist', for display
    """
    found_ids = []
    found_names = []
    if not names_dict:
        return
    # Patterns under "ALL" apply to every channel; per-channel patterns
    # are keyed by the destination label.
    full_pkgs = []
    if "ALL" in names_dict:
        full_pkgs += names_dict["ALL"]
    if self.dest_label() in names_dict:
        full_pkgs += names_dict[self.dest_label()]
    # removes all empty string pattern from the list
    # e.g.: ["", ".*apache.*"] ==> [".*apache.*"], see bsc#1089396
    # (an empty alternative would make the regex match every package)
    full_pkgs = [x for x in full_pkgs if x != '']
    reg_ex = re.compile("|".join(full_pkgs))
    # do the matching only if there is a reg_ex criteria
    if reg_ex.pattern:
        for pkg in pkg_list:
            if reg_ex.match(pkg['name']):
                found_ids.append(pkg['id'])
                found_names.append(pkg['nvrea'])

    log_clean(0, "")
    log_clean(0, "%s: Removing %i packages from %s." %
              (name, len(found_ids), self.to_label))
    log_clean(0, "\n".join(found_names))

    if found_ids:
        print("%s: Removing %i packages from %s" %
              (name, len(found_ids), self.to_label))
        self.remote_api.remove_packages(self.to_label, found_ids)
def __remove_packages(self, names_dict, pkg_list, name):
    """Base removal of packages
    names_dict - dict containing list of package names, with channel
                 lables as keys
    pkg_list - list of package dicts to consider
    name - name of removal 'blacklist' or 'removelist', for display
    """
    found_ids = []
    found_names = []
    if not names_dict:
        return
    full_pkgs = []
    # dict.has_key() was removed in Python 3; use the 'in' operator.
    if "ALL" in names_dict:
        full_pkgs += names_dict["ALL"]
    if self.dest_label() in names_dict:
        full_pkgs += names_dict[self.dest_label()]
    # Escape all regexp special characters, such as '+', '.', etc. to
    # prevent regexp compilation to fault when the package name
    # contains such special characters
    for i, s in enumerate(full_pkgs):
        full_pkgs[i] = re.escape(s)
    # add dollar signs to each one, other wise foo would match foobar
    reg_ex = re.compile("$|".join(full_pkgs) + '$')
    for pkg in pkg_list:
        if reg_ex.match(pkg['name']):
            found_ids.append(pkg['id'])
            found_names.append(pkg['nvrea'])

    log_clean(0, "")
    log_clean(0, "%s: Removing %i packages from %s." %
              (name, len(found_ids), self.to_label))
    log_clean(0, "\n".join(found_names))

    if len(found_ids) > 0:
        # print() function instead of the Python-2-only print statement.
        print("%s: Removing %i packages from %s" %
              (name, len(found_ids), self.to_label))
        self.remote_api.remove_packages(self.to_label, found_ids)
def error_msg(self, message):
    """Log *message*; unless running quietly, also echo it to stderr."""
    rhnLog.log_clean(0, message)
    if self.quiet:
        return
    sys.stderr.write(str(message) + "\n")
def print_msg(self, message):
    """Log *message*; unless running quietly, also echo it to stdout."""
    rhnLog.log_clean(0, message)
    if self.quiet:
        return
    print(message)
def main(options):
    """Drive a clone-by-date run from parsed command-line *options*.

    Builds one ChannelTreeCloner per channel tree, optionally just
    validates, then creates missing channels, prepares, confirms with
    the user, clones and applies removelists.
    NOTE(review): this variant uses Python-2 print statements.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # Never write the real password to the log.
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print "Reading repository information."
    # Map the boolean flag onto the erratum date field to filter by.
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print "Using %s." % options.use_update_date

    cloners = []
    needed_channels = []
    errata = None
    if options.errata:
        errata = set(options.errata)
    for channel_list in options.channels:
        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)
        # if cloning specific errata validate that they actually exist
        # in the original channels
        if options.errata:
            for channel in channel_labels:
                channel_errata = set(xmlrpc.list_errata(channel))
                if len(errata - channel_errata) != 0:
                    print ("Error: all errata specified with --errata must "
                           + "exist in every original channel cloned in "
                           + "this operation.")
                    print ("Channel %s does not contain these errata: %s" %
                           (channel, errata - channel_errata))
                    sys.exit(1)
        # options.parents is only passed along for the first/only tree.
        if options.parents:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date, options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync, errata,
                                            options.parents)
        else:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date, options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync, errata)
        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    # --validate: only check existing channels, never create anything.
    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s" %
                            ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print "\nBy continuing the following channels will be created: "
        print ", ".join(needed_channels)
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    print "\nBy continuing the following will be cloned:"
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    # Nothing pending: still honor the removelist before exiting.
    if total == 0:
        print ("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def process_deps(self, needed_pkgs):
    """Pull dependency RPMs (and the errata providing them) into the
    destination channel.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    Packages provided by a known erratum are brought in by cloning that
    erratum (in bunches of self.bunch_size); the rest are added by
    package id.  Returns the set of nvrea names added, including RPMs
    dragged in by the cloned errata.
    """
    needed_ids = []
    needed_names = set()
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.add(found['nvrea'])

    needed_errata = set()  # list, [0] = advisory, [1] = synopsis
    still_needed_pids = []
    for pid in needed_ids:
        # pid -> advisory results are cached in original_pid_errata_map
        # so the same pid is never searched twice.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    self.original_pid_errata_map[pid] = \
                        erratum['advisory']
                    needed_errata.add((self.original_pid_errata_map[pid],
                                       erratum['synopsis']))
                    break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
                still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    # Log the RPMs we're adding due to dep-solving
    needed_name_set = sorted(set(needed_names))
    if len(needed_name_set) > 0:
        log_clean(0, "")
        log_clean(0, "Adding %i RPM(s) needed for dependencies to %s" %
                  (len(needed_name_set), self.to_label))
        for name in needed_name_set:
            log_clean(0, name)

    # Clone (and log) the errata we are adding for same
    if len(needed_errata) > 0:
        self.total_added_errata += len(needed_errata)
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s :" %
                  (len(needed_errata), self.to_label))
        needed_errata_list = sorted(list(needed_errata))
        while(len(needed_errata_list) > 0):
            errata_set = needed_errata_list[:self.bunch_size]
            del needed_errata_list[:self.bunch_size]
            for e in errata_set:
                log_clean(0, "%s - %s" % e)
                if not self.skip_errata_depsolve:
                    e_pkgs = self.remote_api.get_erratum_packages(e[0])
                else:
                    e_pkgs = []
                # Record every RPM the erratum brings in from the source
                # channel so the caller knows the full set of additions.
                for pkg in e_pkgs:
                    if self.from_label in pkg['providing_channels']:
                        pkg['nvrea'] = "%s-%s-%s.%s" % (pkg['name'],
                                                        pkg['version'],
                                                        pkg['release'],
                                                        pkg['arch_label'])
                        needed_names.add(pkg['nvrea'])
            self.remote_api.clone_errata(self.to_label,
                                         [e[0] for e in errata_set])

    if len(needed_ids) > 0:
        self.remote_api.add_packages(self.to_label, needed_ids)

    self.reset_new_pkgs()
    return needed_names
def main(options):
    """Drive a clone-by-date run from parsed command-line *options*.

    Normalizes new-style (dict) channel specifications, resolves parent
    channels, validates repodata and requested errata, then either
    validates, dry-runs (writing per-channel errata log files), or
    creates/clones channels with user confirmation.
    NOTE(review): this variant uses Python-2 print statements.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # Never write the real password to the log.
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print "Reading repository information."
    # Map the boolean flag onto the erratum date field to filter by.
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print "Using %s." % options.use_update_date

    cloners = []
    needed_channels = []
    errata = None
    if options.errata:
        errata = set(options.errata)
    for channel_list in options.channels:
        parents = None
        if options.parents:
            # if only the dest parent is specified, look up the src parent
            if len(options.parents) == 1:
                src_parent = xmlrpc.get_original(options.parents[0])
                if not src_parent:
                    print ("Channel %s is not a cloned channel." %
                           options.parents[0])
                    sys.exit(1)
                print "Looking up the original channel for %s, %s found" % (
                    options.parents[0], src_parent)
                options.parents = [src_parent] + options.parents
            # options.parents is only set by command line, this must be the
            # only channel tree
            parents = options.parents

        # Handle the new-style channel specification that uses
        # key value pairs. Transform into channel / parent setup that
        # ChannelTreeCloner expects. This code has to be here now that you
        # can specify parents for multiple trees.
        # TODO: the channel / parents structure needs to be cleaned up
        # throught clone-by-date. Probably best thing would to make
        # everywhere use the dict structure instead of the list structure.
        for src_channel in channel_list.keys():
            dest_channel = channel_list[src_channel]
            # new-style config file channel specification
            if type(dest_channel) == dict:
                if 'label' not in dest_channel:
                    raise UserError(
                        "You must specify a label for the clone of %s" %
                        src_channel)
                label = dest_channel['label']
                # name / summary / description all default to the label.
                if 'name' in dest_channel:
                    name = dest_channel['name']
                else:
                    name = label
                if 'summary' in dest_channel:
                    summary = dest_channel['summary']
                else:
                    summary = label
                if 'description' in dest_channel:
                    description = dest_channel['description']
                else:
                    description = label
                # This is the options.parents equivalent for config files.
                # Add channels to parents option and remove from channels.
                # NOTE(review): deletes from channel_list while iterating
                # its keys — works on Python 2 (keys() is a list), would
                # raise on Python 3.
                if ('existing-parent-do-not-modify' in dest_channel
                        and dest_channel['existing-parent-do-not-modify']):
                    parents = [src_channel, label]
                    del channel_list[src_channel]
                else:  # else tranform channel_list entry to the list format
                    channel_list[src_channel] = [label, name, summary,
                                                 description]

        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)
        # ensure the parent's channel metadata is available
        if parents:
            for label in parents:
                if not os.path.exists(repodata(label)):
                    raise UserRepoError(label)

        # if cloning specific errata validate that they actually exist
        # in the original channels
        if options.errata:
            for channel in channel_labels:
                channel_errata = set(xmlrpc.list_errata(channel))
                if len(errata - channel_errata) != 0:
                    print ("Error: all errata specified with --errata must "
                           + "exist in every original channel cloned in "
                           + "this operation.")
                    print ("Channel %s does not contain these errata: %s" %
                           (channel, errata - channel_errata))
                    sys.exit(1)

        tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                        options.to_date, options.blacklist,
                                        options.removelist,
                                        options.security_only,
                                        options.use_update_date,
                                        options.no_errata_sync, errata,
                                        parents)

        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    # --validate: only check existing channels, never create anything.
    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s" %
                            ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print "\nBy continuing the following channels will be created: "
        print ", ".join(needed_channels)
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    # --dry-run: write one '<channel>_<timestamp>' file per channel
    # listing the errata that WOULD be cloned, then exit.
    if options.dry_run:
        for tree_cloner in cloners:
            d_errata = {}
            separator = "|"
            d_errata = tree_cloner.get_errata_to_clone()
            now = datetime.datetime.now()
            for ch in d_errata:
                log_file = ch + "_" + now.strftime("%Y-%m-%d-%H:%M")
                print "# Log file: " + log_file
                fh = open(log_file, 'w')
                for errata in d_errata[ch]:
                    line = ""
                    # dump every erratum field except 'id', '|'-separated
                    for item in list(set(errata) - set(['id'])):
                        line = line + str(errata[item]) + separator
                    fh.write(line + "\n")
                fh.close()
        sys.exit(0)

    print "\nBy continuing the following will be cloned:"
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    # Nothing pending: still honor the removelist before exiting.
    if total == 0:
        print ("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def process_deps(self, needed_pkgs):
    """Pull dependency RPMs (and the errata providing them) into the
    destination channel.

    needed_pkgs - dependency package identifiers to look up via
                  self.src_pkg_exist()

    Dependencies backed by a known erratum are added by cloning that
    erratum (in bunches of self.bunch_size); the rest are added by
    package id.  Returns the set of nvrea names added, including RPMs
    brought in by the cloned errata.
    """
    needed_ids = []
    needed_names = set()
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.add(found['nvrea'])

    needed_errata = set()  # list, [0] = advisory, [1] = synopsis
    still_needed_pids = []
    for pid in needed_ids:
        # Cache pid -> advisory lookups across calls.
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    self.original_pid_errata_map[pid] = \
                        erratum['advisory']
                    needed_errata.add((self.original_pid_errata_map[pid],
                                       erratum['synopsis']))
                    break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
                still_needed_pids.append(pid)
    needed_ids = still_needed_pids

    # Log the RPMs we're adding due to dep-solving
    needed_name_set = sorted(set(needed_names))
    if needed_name_set:
        log_clean(0, "")
        log_clean(0, "Adding %i RPM(s) needed for dependencies to %s" %
                  (len(needed_name_set), self.to_label))
        for name in needed_name_set:
            log_clean(0, name)

    # Clone (and log) the errata we are adding for same
    if needed_errata:
        self.total_added_errata += len(needed_errata)
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s :" %
                  (len(needed_errata), self.to_label))
        needed_errata_list = sorted(list(needed_errata))
        while (needed_errata_list):
            errata_set = needed_errata_list[:self.bunch_size]
            del needed_errata_list[:self.bunch_size]
            for e in errata_set:
                log_clean(0, "%s - %s" % e)
                if not self.skip_errata_depsolve:
                    e_pkgs = self.remote_api.get_erratum_packages(e[0])
                else:
                    e_pkgs = []
                # Track every RPM the erratum pulls from the source
                # channel so the caller sees the full set of additions.
                for pkg in e_pkgs:
                    if self.from_label in pkg['providing_channels']:
                        pkg['nvrea'] = "%s-%s-%s.%s" % (pkg['name'],
                                                        pkg['version'],
                                                        pkg['release'],
                                                        pkg['arch_label'])
                        needed_names.add(pkg['nvrea'])
            self.remote_api.clone_errata(self.to_label,
                                         [e[0] for e in errata_set])

    if needed_ids:
        self.remote_api.add_packages(self.to_label, needed_ids)
    self.reset_new_pkgs()
    return needed_names
def main(options):
    """Drive a clone-by-date run from parsed command-line options.

    Validates channel trees (repodata present, requested errata exist),
    optionally creates missing clone channels, then clones errata (or, in
    dry-run mode, only writes the would-be-cloned errata to log files).
    Exits the process via sys.exit() on validation failure, dry-run
    completion, or when there is nothing to clone.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # never write the real password into the log
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print("Reading repository information.")
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print("Using %s." % options.use_update_date)

    cloners = []
    needed_channels = []
    errata = None
    if options.errata:
        errata = set(options.errata)
    for channel_list in options.channels:
        parents = None
        if options.parents:
            # if only the dest parent is specified, look up the src parent
            if len(options.parents) == 1:
                src_parent = xmlrpc.get_original(options.parents[0])
                if not src_parent:
                    print("Channel %s is not a cloned channel."
                          % options.parents[0])
                    sys.exit(1)
                print("Looking up the original channel for %s, %s found"
                      % (options.parents[0], src_parent))
                options.parents = [src_parent] + options.parents
            # options.parents is only set by command line, this must be the
            # only channel tree
            parents = options.parents

        # Handle the new-style channel specification that uses
        # key value pairs. Transform into channel / parent setup that
        # ChannelTreeCloner expects. This code has to be here now that you
        # can specify parents for multiple trees.
        # TODO: the channel / parents structure needs to be cleaned up
        # throughout clone-by-date. Probably best thing would to make
        # everywhere use the dict structure instead of the list structure.
        # NOTE: iterate a snapshot of the keys because entries may be
        # deleted from channel_list inside the loop.
        for src_channel in list(channel_list.keys()):
            dest_channel = channel_list[src_channel]
            # new-style config file channel specification
            if isinstance(dest_channel, dict):
                if 'label' not in dest_channel:
                    raise UserError(
                        "You must specify a label for the clone of %s"
                        % src_channel)
                label = dest_channel['label']
                # name / summary / description all fall back to the label
                name = dest_channel.get('name', label)
                summary = dest_channel.get('summary', label)
                description = dest_channel.get('description', label)

                # This is the options.parents equivalent for config files.
                # Add channels to parents option and remove from channels.
                if ('existing-parent-do-not-modify' in dest_channel
                        and dest_channel['existing-parent-do-not-modify']):
                    parents = [src_channel, label]
                    del channel_list[src_channel]
                else:  # else transform channel_list entry to the list format
                    channel_list[src_channel] = [label, name, summary,
                                                 description]

        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = list(channel_list.keys())
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)
        # ensure the parent's channel metadata is available
        if parents:
            for label in parents:
                if not os.path.exists(repodata(label)):
                    raise UserRepoError(label)

        # if cloning specific errata validate that they actually exist
        # in the original channels
        if options.errata:
            for channel in channel_labels:
                channel_errata = set(xmlrpc.list_errata(channel))
                if len(errata - channel_errata) != 0:
                    print("Error: all errata specified with --errata must "
                          "exist in every original channel cloned in "
                          "this operation.")
                    print("Channel %s does not contain these errata: %s"
                          % (channel, errata - channel_errata))
                    sys.exit(1)

        tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                        options.to_date, options.blacklist,
                                        options.removelist,
                                        options.background,
                                        options.security_only,
                                        options.use_update_date,
                                        options.no_errata_sync, errata,
                                        parents)
        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s"
                            % ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print("\nBy continuing the following channels will be created: ")
        print(", ".join(needed_channels))
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    if options.dry_run:
        # write one pipe-separated file per destination channel listing the
        # errata that a real run would clone, then stop
        for tree_cloner in cloners:
            d_errata = {}
            separator = "|"
            d_errata = tree_cloner.get_errata_to_clone()
            now = datetime.datetime.now()
            for ch in d_errata:
                log_file = ch + "_" + now.strftime("%Y-%m-%d-%H:%M")
                print("# Log file: " + log_file)
                with open(log_file, 'w') as fh:
                    for errata in d_errata[ch]:
                        line = ""
                        # dump every field except the internal id
                        for item in list(set(errata) - set(['id'])):
                            line = line + str(errata[item]) + separator
                        fh.write(line + "\n")
        sys.exit(0)

    print("\nBy continuing the following will be cloned:")
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    if total == 0:
        print("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def log_msg(message):
    """Record *message* verbatim in the clone-by-date log file."""
    rhnLog.log_clean(0, message)
def print_msg(self, message):
    """Log *message*, and echo it to stdout unless quiet mode is enabled.

    Uses the function form of print (valid on Python 2 for a single
    argument and required on Python 3), matching the modernized code
    elsewhere in this file.
    """
    rhnLog.log_clean(0, message)
    if not self.quiet:
        print(message)
def error_msg(self, message):
    """Record *message* in the log and write it to standard error."""
    rhnLog.log_clean(0, message)
    text = "%s\n" % message
    sys.stderr.write(text)
def main(options):
    """Drive a clone-by-date run from parsed command-line options.

    Checks repodata availability for every channel, optionally creates
    missing clone channels, then clones errata.  Exits the process via
    sys.exit() when there is nothing to clone.  Print statements are
    written as single-argument print() calls, which behave identically
    on Python 2 and are valid on Python 3.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # never write the real password into the log
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print("Reading repository information.")
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print("Using %s." % options.use_update_date)

    cloners = []
    needed_channels = []
    for channel_list in options.channels:
        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)

        if options.parents:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date,
                                            options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync,
                                            options.parents)
        else:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date,
                                            options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync)
        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s"
                            % ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print("\nBy continuing the following channels will be created: ")
        print(", ".join(needed_channels))
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    print("\nBy continuing the following will be cloned:")
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    if total == 0:
        print("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def process_deps(self, needed_pkgs):
    """Pull dependency RPMs (and the errata providing them) into the clone.

    For each package in needed_pkgs that exists in the source channel,
    clone the erratum that provides it when one matches; packages with no
    matching erratum are added directly by id.  Returns the set of nvrea
    strings found, so callers can iterate dep-solving until it converges.
    """
    needed_ids = []
    needed_names = set()
    for pkg in needed_pkgs:
        found = self.src_pkg_exist([pkg])
        if found:
            needed_ids.append(found['id'])
            needed_names.add(found['nvrea'])

    needed_errata = set()
    still_needed_pids = []
    for pid in needed_ids:
        if pid not in self.original_pid_errata_map:
            errata_list = self.remote_api.list_providing_errata(pid)
            for erratum in errata_list:
                if erratum['advisory'] in self.original_errata:
                    self.original_pid_errata_map[pid] = \
                        erratum['advisory']
                    break
            else:  # no match found, store so we don't repeat search
                self.original_pid_errata_map[pid] = None
        # PEP 8: compare against None with "is not", not "!="
        if self.original_pid_errata_map[pid] is not None:
            needed_errata.add(self.original_pid_errata_map[pid])
        else:
            still_needed_pids.append(pid)
    # only the packages with no providing erratum are added directly
    needed_ids = still_needed_pids

    # Log the RPMs we're adding due to dep-solving
    # (sorted for deterministic log output)
    if needed_ids:
        log_clean(0, "")
        log_clean(0, "Adding %i RPM(s) needed for dependencies to %s"
                  % (len(needed_ids), self.to_label))
    for name in sorted(needed_names):
        log_clean(0, name)

    # Clone (and log) the errata we are adding for same
    if needed_errata:
        log_clean(0, "")
        log_clean(0, "Cloning %i errata for dependencies to %s :"
                  % (len(needed_errata), self.to_label))
        needed_errata_list = list(needed_errata)
        while needed_errata_list:
            # clone in batches of bunch_size advisories per API call
            errata_set = needed_errata_list[:self.bunch_size]
            del needed_errata_list[:self.bunch_size]
            for e in errata_set:
                log_clean(0, "%s" % e)
            self.remote_api.clone_errata(self.to_label, errata_set)

    if needed_ids:
        self.remote_api.add_packages(self.to_label, needed_ids)

    self.reset_new_pkgs()
    return needed_names