def cleanupHandler(self, req):
    """Per-request teardown: stop the timer, run the wrapped cleanup
    handler, release per-request state, and reset logging.

    Returns whatever the wrapped cleanup handler returns (an Apache
    status code).
    """
    self._timer()
    retval = self._wrapper(req, self._cleanupHandler)
    self._cleanup()
    # Reset the logger to stderr
    initLOG()
    return retval
def sync(self, channels=None):
    """Sync CDN content for the given channel labels.

    :param channels: iterable of channel labels; falls back to the
        already-synced channels when empty/None.
    :raises ChannelNotFoundError: if any requested channel is missing
        from the loaded metadata/family/content-source mappings.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = self.synced_channels
    # Check channel availability before doing anything
    not_available = []
    for channel in channels:
        # A channel is usable only if it appears in all three mappings.
        if any(channel not in d for d in
               [self.channel_metadata, self.channel_to_family, self.content_source_mapping]):
            not_available.append(channel)
    if not_available:
        raise ChannelNotFoundError(" " + "\n ".join(not_available))
    # Need to update channel metadata
    self._update_channels_metadata(channels)
    # Finally, sync channel content
    total_time = datetime.timedelta()
    for channel in channels:
        cur_time = self._sync_channel(channel)
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
def main():
    """CLI entry point: parse options, configure logging, connect to the
    DB, and run the requested update passes.

    Exits with -1 after reporting any unrecognized positional arguments.
    """
    global debug, verbose
    parser = OptionParser(option_list=options_table)
    (options, args) = parser.parse_args()
    if args:
        # All positional arguments are invalid; report them on stderr.
        for arg in args:
            sys.stderr.write("Not a valid option ('%s'), try --help\n" % arg)
        sys.exit(-1)
    if options.verbose:
        initLOG("stdout", options.verbose or 0)
        verbose = 1
    if options.debug:
        # Debug logging goes to the configured log file, not stdout.
        initLOG(CFG.LOG_FILE, options.debug or 0)
        debug = 1
    rhnSQL.initDB()
    # Run only the passes the user asked for.
    if options.update_filer:
        process_package_data()
    if options.update_sha256:
        process_sha256_packages()
    if options.update_kstrees:
        process_kickstart_trees()
    if options.update_package_files:
        process_package_files()
    if options.update_changelog:
        process_changelog()
def sync(self, channels=None):
    """Sync CDN content for the given channels.

    Returns a list of human-readable error messages (empty on full
    success); per-channel failures are collected rather than raised.

    :raises ChannelNotFoundError: if a channel is missing from metadata
        or its repositories are not accessible.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = self.synced_channels
    # Check channel availability before doing anything
    not_available = []
    for channel in channels:
        if any(channel not in d for d in [self.channel_metadata, self.channel_to_family]) or (
                not self.cdn_repository_manager.check_channel_availability(channel, self.no_kickstarts)):
            not_available.append(channel)
    if not_available:
        raise ChannelNotFoundError(" " + "\n ".join(not_available))
    # Need to update channel metadata
    self._update_channels_metadata(channels)
    # Finally, sync channel content
    error_messages = []
    total_time = datetime.timedelta()
    for channel in channels:
        cur_time, ret_code = self._sync_channel(channel)
        # Non-zero return code: keep going, but remember the failure.
        if ret_code != 0:
            error_messages.append("Problems occurred during syncing channel %s. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n" % (channel, channel))
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
    return error_messages
def setup_config(self, config, force=0):
    """Initialize logging, validate the SSL certificate, connect to the
    DB, and set up jabber dispatcher credentials.

    Returns 1 on an invalid SSL certificate; otherwise returns None.
    """
    # Figure out the log level
    debug_level = self.options.verbose
    if debug_level is None:
        debug_level = CFG.debug
    self.debug_level = debug_level
    logfile = self.options.logfile
    if logfile is None or logfile == '':
        logfile = CFG.log_file
    initLOG(level=debug_level, log_file=logfile)
    # Get the ssl cert
    ssl_cert = CFG.osa_ssl_cert
    try:
        self.check_cert(ssl_cert)
    except jabber_lib.InvalidCertError:
        # sys.exc_info() is used instead of 'as e' for py2/py3 compatibility.
        e = sys.exc_info()[1]
        log_error("Invalid SSL certificate:", e)
        return 1
    self.ssl_cert = ssl_cert
    rhnSQL.initDB()
    self._username = '******'
    self._password = self.get_dispatcher_password(self._username)
    if not self._password:
        self._password = self.create_dispatcher_password(32)
    self._resource = 'superclient'
    js = config.get('jabber_server')
    self._jabber_servers = [idn_ascii_to_puny(js)]
def headerParserHandler(self, req):
    """Apache header-parser phase for the upload server: initialize
    config/logging from the request's RHNComponentType, pick the
    configured upload server class, and dispatch to it.

    Returns an Apache status code.
    """
    log_setreq(req)
    # init configuration options with proper component
    options = req.get_options()
    # if we are initializing out of a <Location> handler don't
    # freak out
    # NOTE: '.has_key()' replaced with 'in' (Python-2-only API); the same
    # option-table membership test is already done with 'in' elsewhere in
    # this codebase.
    if "RHNComponentType" not in options:
        # clearly nothing to do
        return apache.OK
    initCFG(options["RHNComponentType"])
    initLOG(CFG.LOG_FILE, CFG.DEBUG)
    if req.method == 'GET':
        # This is the ping method
        return apache.OK
    self.servers = rhnImport.load("upload_server/handlers",
                                  interface_signature='upload_class')
    if 'SERVER' not in options:
        log_error("SERVER not set in the apache config files!")
        return apache.HTTP_INTERNAL_SERVER_ERROR
    server_name = options['SERVER']
    if server_name not in self.servers:
        log_error("Unable to load server %s from available servers %s" %
                  (server_name, self.servers))
        return apache.HTTP_INTERNAL_SERVER_ERROR
    server_class = self.servers[server_name]
    self.server = server_class(req)
    return self._wrapper(req, "headerParserHandler")
def processCommandline():
    """Build the argument parser, parse sys.argv, start logging, and
    return the parsed options namespace."""
    arg_parser = argparse.ArgumentParser(usage="%(prog)s [options] [command]")
    arg_parser.add_argument("-r", "--root-ca-file",
                            help="Path to the Root CA")
    arg_parser.add_argument("-i", "--intermediate-ca-file",
                            action="append", default=[],
                            help="Path to an intermediate CA")
    arg_parser.add_argument("-s", "--server-cert-file",
                            help="Path to the Server Certificate")
    arg_parser.add_argument("-k", "--server-key-file",
                            help="Path to the Server Private Key")
    arg_parser.add_argument("--check-only", "-c", action="store_true")
    arg_parser.add_argument("--verbose", "-v", action="count", default=0)
    opts = arg_parser.parse_args()

    # Log at least at level 1 even when no -v was given.
    initLOG(LOGFILE, opts.verbose or 1)
    log(sys.argv, 1)
    return opts
def setup_config(self, config, force=0):
    """Initialize logging, validate the SSL certificate, connect to the
    DB, and set up jabber dispatcher credentials.

    Returns 1 on an invalid SSL certificate; otherwise returns None.
    """
    # Figure out the log level
    debug_level = self.options.verbose
    if debug_level is None:
        debug_level = CFG.debug
    self.debug_level = debug_level
    logfile = self.options.logfile
    if logfile is None or logfile == '':
        logfile = CFG.log_file
    initLOG(level=debug_level, log_file=logfile)
    # Get the ssl cert
    ssl_cert = CFG.osa_ssl_cert
    try:
        self.check_cert(ssl_cert)
    except jabber_lib.InvalidCertError:
        # sys.exc_info() is used instead of 'as e' for py2/py3 compatibility.
        e = sys.exc_info()[1]
        log_error("Invalid SSL certificate:", e)
        return 1
    self.ssl_cert = ssl_cert
    rhnSQL.initDB()
    self._username = '******'
    self._password = self.get_dispatcher_password(self._username)
    if not self._password:
        self._password = self.create_dispatcher_password(32)
    self._resource = 'superclient'
    js = config.get('jabber_server')
    self._jabber_servers = [idn_ascii_to_puny(js)]
def sync(self, channels=None):
    """Sync CDN content for the given channel labels.

    :param channels: iterable of channel labels; falls back to the
        already-synced channels when empty/None.
    :raises ChannelNotFoundError: if any requested channel is missing
        from the loaded metadata/family/content-source mappings.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = self.synced_channels
    # Check channel availability before doing anything
    not_available = []
    for channel in channels:
        # A channel is usable only if it appears in all three mappings.
        if any(channel not in d for d in [
                self.channel_metadata, self.channel_to_family,
                self.content_source_mapping
        ]):
            not_available.append(channel)
    if not_available:
        raise ChannelNotFoundError(" " + "\n ".join(not_available))
    # Need to update channel metadata
    self._update_channels_metadata(channels)
    # Finally, sync channel content
    total_time = datetime.timedelta()
    for channel in channels:
        cur_time = self._sync_channel(channel)
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
def sync(self, channels=None):
    """Sync CDN content for the given channels, collecting (not raising)
    errors.

    Unavailable channels are reported in the returned error-message list
    instead of aborting the run; available ones are still synced.

    Returns a list of error/warning message strings (empty on success).
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = list(self.synced_channels)
    # Check channel availability before doing anything
    not_available = []
    available = []
    for channel in channels:
        if not self._is_channel_available(channel):
            not_available.append(channel)
        else:
            available.append(channel)
    channels = available

    error_messages = []
    # if we have not_available channels log the error immediately
    if not_available:
        msg = "ERROR: these channels either do not exist or are not available:\n " + "\n ".join(not_available)
        error_messages.append(msg)

    # BZ 1434913 - let user know satellite may not be activated if all channels are in not_available
    if not available:
        msg = "WARNING: Is your Red Hat Satellite activated for CDN?\n"
        msg += "(to see details about currently used SSL certificates for accessing CDN:"
        msg += " /usr/bin/cdn-sync --cdn-certs)"
        error_messages.append(msg)

    # Need to update channel metadata
    self._update_channels_metadata([ch for ch in channels if ch in self.channel_metadata])
    # Make sure custom channels are properly connected with repos
    for channel in channels:
        if channel in self.synced_channels and self.synced_channels[channel]:
            self.cdn_repository_manager.assign_repositories_to_channel(channel)
    reposync.clear_ssl_cache()
    # Finally, sync channel content
    total_time = timedelta()
    for channel in channels:
        # _sync_channel() returns (elapsed, failed_packages); a negative
        # failed-package count signals a whole-channel failure.
        cur_time, failed_packages = self._sync_channel(channel)
        if failed_packages < 0:
            error_messages.append("Problems occurred during syncing channel %s. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n" % (channel, channel))
        if failed_packages > 0:
            error_messages.append("%d packages in channel %s failed to sync. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n"
                                  % (failed_packages, channel, channel))
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
    return error_messages
def __init__(
    self,
    channel_label,
    repo_type,
    url=None,
    fail=False,
    quiet=False,
    filters=None,
    no_errata=False,
    sync_kickstart=False,
    latest=False,
):
    """Set up a repo sync run for one custom channel: config, logging,
    DB connection, and the list of source URLs to pull from.

    Exits the process if the channel does not exist / is not custom, or
    if no URL is given and none is associated in the DB.
    """
    self.regen = False
    self.fail = fail
    self.quiet = quiet
    self.filters = filters or []
    self.no_errata = no_errata
    self.sync_kickstart = sync_kickstart
    self.latest = latest

    initCFG("server")
    rhnSQL.initDB()

    # setup logging
    log_filename = channel_label + ".log"
    rhnLog.initLOG(default_log_location + log_filename)
    # os.fchown isn't in 2.4 :/
    os.system("chgrp apache " + default_log_location + log_filename)
    self.log_msg("\nSync started: %s" % (time.asctime(time.localtime())))
    self.log_msg(str(sys.argv))

    self.channel_label = channel_label
    self.channel = self.load_channel()
    if not self.channel or not rhnChannel.isCustomChannel(self.channel["id"]):
        self.print_msg("Channel does not exist or is not custom.")
        sys.exit(1)

    if not url:
        # TODO:need to look at user security across orgs
        h = rhnSQL.prepare(
            """select s.id, s.source_url, s.label
               from rhnContentSource s,
                    rhnChannelContentSource cs
               where s.id = cs.source_id
                 and cs.channel_id = :channel_id"""
        )
        h.execute(channel_id=int(self.channel["id"]))
        source_data = h.fetchall_dict()
        if source_data:
            self.urls = [(row["id"], row["source_url"], row["label"])
                         for row in source_data]
        else:
            self.error_msg("Channel has no URL associated")
            sys.exit(1)
    else:
        self.urls = [(None, u, None) for u in url]

    self.repo_plugin = self.load_plugin(repo_type)
def __init__(self, debug):
    """Initialize logging at the given debug level and connect to the DB.

    :param debug: numeric log level passed to rhnLog.initLOG().
    :raises Exception: when the database connection cannot be established.
    """
    self.debug = debug
    rhnLog.initLOG(DEFAULT_LOG_LOCATION + 'mgr-delete-patch.log', self.debug)
    try:
        rhnSQL.initDB()
    # 'except X as e' replaces the Python-2-only 'except X, e' spelling,
    # matching the style already used elsewhere in this codebase.
    except rhnSQL.SQLConnectError as e:
        log_error("Could not connect to the database. %s" % e)
        raise Exception("Could not connect to the database. %s" % e)
def cleanupHandler(self, req):
    """Per-request teardown: skip work for GET pings, run the wrapped
    cleanup handler, reset logging, and drop the server reference.

    Returns an Apache status code.
    """
    if req.method == 'GET':
        # This is the ping method
        return apache.OK
    retval = self._wrapper(req, "cleanupHandler")
    # Reset the logger to stderr
    initLOG()
    self.server = None
    return retval
def __init__(self, channel_label, repo_type, url=None, fail=False,
             quiet=False, filters=None, no_errata=False, sync_kickstart=False,
             latest=False, strict=0):
    """Set up a repo sync run for one channel: config, logging, DB
    connection, and the list of source URLs to pull from.

    Exits the process if the channel does not exist, or if no URL is
    given and none is associated in the DB.
    """
    self.regen = False
    self.fail = fail
    self.quiet = quiet
    self.filters = filters or []
    self.no_errata = no_errata
    self.sync_kickstart = sync_kickstart
    self.latest = latest

    initCFG('server.satellite')
    rhnSQL.initDB()

    # setup logging
    log_filename = channel_label + '.log'
    # Bug fix: default the level so rhnLog.initLOG() never sees an unbound
    # name -- previously log_level was assigned only when CFG.DEBUG was
    # not None, raising NameError otherwise.
    log_level = 0
    if CFG.DEBUG is not None:
        log_level = CFG.DEBUG
    rhnLog.initLOG(default_log_location + log_filename, log_level)
    # os.fchown isn't in 2.4 :/
    if isSUSE():
        os.system("chgrp www " + default_log_location + log_filename)
    else:
        os.system("chgrp apache " + default_log_location + log_filename)
    self.log_msg("\nSync started: %s" % (time.asctime(time.localtime())))
    self.log_msg(str(sys.argv))

    self.channel_label = channel_label
    self.channel = self.load_channel()
    if not self.channel:
        self.print_msg("Channel does not exist.")
        sys.exit(1)

    if not url:
        # TODO:need to look at user security across orgs
        h = rhnSQL.prepare("""select s.id, s.source_url, s.label, fm.channel_family_id
                              from rhnContentSource s,
                                   rhnChannelContentSource cs,
                                   rhnChannelFamilyMembers fm
                             where s.id = cs.source_id
                               and cs.channel_id = fm.channel_id
                               and cs.channel_id = :channel_id""")
        h.execute(channel_id=int(self.channel['id']))
        source_data = h.fetchall_dict()
        if source_data:
            self.urls = [(row['id'], row['source_url'], row['label'], row['channel_family_id'])
                         for row in source_data]
        else:
            self.error_msg("Channel has no URL associated")
            sys.exit(1)
    else:
        self.urls = [(None, u, None, None) for u in url]

    self.repo_plugin = self.load_plugin(repo_type)
    self.strict = strict
def __init__(self, channel_label, repo_type, url=None, fail=False,
             quiet=False, filters=None, no_errata=False, sync_kickstart=False,
             latest=False):
    """Set up a repo sync run for one custom channel: config, logging,
    DB connection, and the list of source URLs to pull from.

    Exits the process if the channel does not exist / is not custom, or
    if no URL is given and none is associated in the DB.
    """
    self.regen = False
    self.fail = fail
    self.quiet = quiet
    self.filters = filters or []
    self.no_errata = no_errata
    self.sync_kickstart = sync_kickstart
    self.latest = latest

    initCFG('server')
    rhnSQL.initDB()

    # setup logging
    log_filename = channel_label + '.log'
    rhnLog.initLOG(default_log_location + log_filename)
    # os.fchown isn't in 2.4 :/
    os.system("chgrp apache " + default_log_location + log_filename)
    self.log_msg("\nSync started: %s" % (time.asctime(time.localtime())))
    self.log_msg(str(sys.argv))

    self.channel_label = channel_label
    self.channel = self.load_channel()
    if not self.channel or not rhnChannel.isCustomChannel(self.channel['id']):
        self.print_msg("Channel does not exist or is not custom.")
        sys.exit(1)

    if not url:
        # TODO:need to look at user security across orgs
        h = rhnSQL.prepare("""select s.id, s.source_url, s.label
                              from rhnContentSource s,
                                   rhnChannelContentSource cs
                              where s.id = cs.source_id
                                and cs.channel_id = :channel_id""")
        h.execute(channel_id=int(self.channel['id']))
        source_data = h.fetchall_dict()
        if source_data:
            self.urls = [(row['id'], row['source_url'], row['label'])
                         for row in source_data]
        else:
            self.error_msg("Channel has no URL associated")
            sys.exit(1)
    else:
        self.urls = [(None, u, None) for u in url]

    self.repo_plugin = self.load_plugin(repo_type)
def setup_config(self, config):
    """Initialize logging from the configured level and validate the
    OSA SSL certificate.

    Returns 1 on an invalid SSL certificate; otherwise returns None.
    """
    # Figure out the log level
    debug_level = self.options.verbose
    if debug_level is None:
        debug_level = CFG.debug
    self.debug_level = debug_level
    initLOG(level=debug_level, log_file=CFG.log_file)
    # Get the ssl cert
    ssl_cert = CFG.osa_ssl_cert
    try:
        self.check_cert(ssl_cert)
    # 'except X as e' replaces the Python-2-only 'except X, e' spelling,
    # matching the style already used elsewhere in this codebase.
    except jabber_lib.InvalidCertError as e:
        log_error("Invalid SSL certificate:", e)
        return 1
def headerParserHandler(self, req): log_setreq(req) # We need to init CFG and Logging options = req.get_options() # if we are initializing out of a <Location> handler don't # freak out if "RHNComponentType" not in options: # clearly nothing to do return apache.OK initCFG(options["RHNComponentType"]) initLOG(CFG.LOG_FILE, CFG.DEBUG) """ parse the request, init database and figure out what can we call """ log_debug(2, req.the_request) # call method from inherited class ret = apacheSession.headerParserHandler(self, req) if ret != apache.OK: return ret # make sure we have DB connection if not CFG.SEND_MESSAGE_TO_ALL: try: rhnSQL.initDB() except rhnSQL.SQLConnectError: rhnTB.Traceback(mail=1, req=req, severity="schema") return apache.HTTP_INTERNAL_SERVER_ERROR else: # If in outage mode, close the DB connections rhnSQL.closeDB() # Store client capabilities client_cap_header = 'X-RHN-Client-Capability' if client_cap_header in req.headers_in: client_caps = req.headers_in[client_cap_header] client_caps = [ _f for _f in list( map(string.strip, string.split(client_caps, ","))) if _f ] rhnCapability.set_client_capabilities(client_caps) # Enabling the input header flags associated with the redirects/newer clients redirect_support_flags = [ 'X-RHN-Redirect', 'X-RHN-Transport-Capability' ] for flag in redirect_support_flags: if flag in req.headers_in: rhnFlags.set(flag, str(req.headers_in[flag])) return apache.OK
def headerParserHandler(self, req):
    """Apache header-parser phase: init config/logging from the
    request's RHNComponentType, delegate to the session base class,
    manage the DB connection, and record client capability/redirect
    headers.

    Returns an Apache status code.
    """
    log_setreq(req)
    # We need to init CFG and Logging
    options = req.get_options()
    # if we are initializing out of a <Location> handler don't
    # freak out
    # NOTE: '.has_key()' replaced with 'in' (Python-2-only API); the
    # sibling implementation of this handler already uses 'in' on the
    # same option table and headers_in objects.
    if "RHNComponentType" not in options:
        # clearly nothing to do
        return apache.OK
    initCFG(options["RHNComponentType"])
    initLOG(CFG.LOG_FILE, CFG.DEBUG)
    """ parse the request, init database and figure out what can we call """
    log_debug(2, req.the_request)
    # call method from inherited class
    ret = apacheSession.headerParserHandler(self, req)
    if ret != apache.OK:
        return ret
    # make sure we have DB connection
    if not CFG.SEND_MESSAGE_TO_ALL:
        try:
            rhnSQL.initDB()
        except rhnSQL.SQLConnectError:
            rhnTB.Traceback(mail=1, req=req, severity="schema")
            return apache.HTTP_INTERNAL_SERVER_ERROR
    else:
        # If in outage mode, close the DB connections
        rhnSQL.closeDB()
    # Store client capabilities
    client_cap_header = 'X-RHN-Client-Capability'
    if client_cap_header in req.headers_in:
        client_caps = req.headers_in[client_cap_header]
        # Split the comma-separated header, strip, and drop empties.
        client_caps = filter(None,
                             map(string.strip, string.split(client_caps, ",")))
        rhnCapability.set_client_capabilities(client_caps)
    # Enabling the input header flags associated with the redirects/newer clients
    redirect_support_flags = ['X-RHN-Redirect', 'X-RHN-Transport-Capability']
    for flag in redirect_support_flags:
        if flag in req.headers_in:
            rhnFlags.set(flag, str(req.headers_in[flag]))
    return apache.OK
def sync(self, channels=None):
    """Sync CDN content for the given channels.

    Returns a list of per-channel error messages (empty on full
    success).

    :raises ChannelNotFoundError: if any requested channel is not
        available.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = list(self.synced_channels)
    # Check channel availability before doing anything
    not_available = []
    for channel in channels:
        if not self._is_channel_available(channel):
            not_available.append(channel)
    if not_available:
        raise ChannelNotFoundError(" " + "\n ".join(not_available))
    # Need to update channel metadata
    self._update_channels_metadata(
        [ch for ch in channels if ch in self.channel_metadata])
    # Make sure custom channels are properly connected with repos
    for channel in channels:
        if channel in self.synced_channels and self.synced_channels[channel]:
            self.cdn_repository_manager.assign_repositories_to_channel(channel)
    # Finally, sync channel content
    error_messages = []
    total_time = timedelta()
    for channel in channels:
        # _sync_channel() returns (elapsed, failed_packages); a negative
        # failed-package count signals a whole-channel failure.
        cur_time, failed_packages = self._sync_channel(channel)
        if failed_packages < 0:
            error_messages.append(
                "Problems occurred during syncing channel %s. Please check "
                "/var/log/rhn/cdnsync/%s.log for the details\n" % (channel, channel))
        if failed_packages > 0:
            error_messages.append(
                "%d packages in channel %s failed to sync. Please check "
                "/var/log/rhn/cdnsync/%s.log for the details\n" %
                (failed_packages, channel, channel))
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
    return error_messages
def headerParserHandler(self, req):
    # pylint: disable=W0201
    """Apache header-parser phase for the satellite exporter: init
    config/logging, short-circuit with a system-wide message when in
    outage mode, otherwise connect to the DB and dispatch to the
    configured exporter server class.

    Returns an Apache status code.
    """
    log_setreq(req)
    self.start_time = time.time()
    # init configuration options with proper component
    options = req.get_options()
    # if we are initializing out of a <Location> handler don't
    # freak out
    if "RHNComponentType" not in options:
        # clearly nothing to do
        return apache.OK
    initCFG(options["RHNComponentType"])
    initLOG(CFG.LOG_FILE, CFG.DEBUG)
    # short-circuit everything if sending a system-wide message.
    if CFG.SEND_MESSAGE_TO_ALL:
        # Drop the database connection
        # pylint: disable=W0702
        try:
            rhnSQL.closeDB()
        except:
            pass
        # Fetch global message being sent to clients if applicable.
        msg = open(CFG.MESSAGE_TO_ALL).read()
        log_debug(3, "Sending message to all clients: %s" % msg)
        return self._send_xmlrpc(
            req,
            rhnFault(-1,
                     _("IMPORTANT MESSAGE FOLLOWS:\n%s") % msg,
                     explain=0))
    rhnSQL.initDB()
    self.server = options['SERVER']
    self.server_classes = rhnImport.load("satellite_exporter/handlers")
    if self.server not in self.server_classes:
        # XXX do something interesting here
        log_error("Missing server", self.server)
        return apache.HTTP_NOT_FOUND
    return self._wrapper(req, self._headerParserHandler)
def sync(self, channels=None):
    """Sync CDN content for the given channels.

    Returns a list of per-channel error messages (empty on full
    success).

    :raises ChannelNotFoundError: if a channel is missing from metadata
        or its repositories are not accessible.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = self.synced_channels
    # Check channel availability before doing anything
    not_available = []
    for channel in channels:
        if any(
                channel not in d
                for d in [self.channel_metadata, self.channel_to_family]
        ) or (not self.cdn_repository_manager.check_channel_availability(
                channel, self.no_kickstarts)):
            not_available.append(channel)
    if not_available:
        raise ChannelNotFoundError(" " + "\n ".join(not_available))
    # Need to update channel metadata
    self._update_channels_metadata(channels)
    # Finally, sync channel content
    error_messages = []
    total_time = timedelta()
    for channel in channels:
        # _sync_channel() returns (elapsed, failed_packages); a negative
        # failed-package count signals a whole-channel failure.
        cur_time, failed_packages = self._sync_channel(channel)
        if failed_packages < 0:
            error_messages.append(
                "Problems occurred during syncing channel %s. Please check "
                "/var/log/rhn/cdnsync/%s.log for the details\n" % (channel, channel))
        if failed_packages > 0:
            error_messages.append(
                "%d packages in channel %s failed to sync. Please check "
                "/var/log/rhn/cdnsync/%s.log for the details\n" %
                (failed_packages, channel, channel))
        total_time += cur_time
    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])
    return error_messages
def __init__(self, all=False, channel=None, debug=0):  # pylint: disable=redefined-builtin
    """Validate CLI selection, initialize config/logging, and connect
    to the DB.

    :param all: operate on all channels (mutually required with channel).
    :param channel: single channel label to operate on.
    :param debug: log level; 0 means "use CFG.DEBUG".
    :raises Exception: when the database connection cannot be established.
    """
    self.all = all
    self.channel = channel
    self.debug = debug

    if not all and not channel:
        print("You need to specify either --all or --channel")
        sys.exit(1)

    initCFG("server.susemanager")
    if self.debug == 0:
        self.debug = CFG.DEBUG
    rhnLog.initLOG(DEFAULT_LOG_LOCATION + 'mgr-clean-old-patchnames.log', self.debug)
    try:
        rhnSQL.initDB()
    except rhnSQL.SQLConnectError as e:
        log_error("Could not connect to the database. %s" % e)
        raise Exception("Could not connect to the database. %s" % e)
def __call__(self, req):
    """Dispatch an Apache request to the named method on the shared
    handler object, initializing config/logging on first use.

    Returns the handler's result, or HTTP_INTERNAL_SERVER_ERROR on any
    unhandled exception (with a traceback report).
    """
    # NOTE: all imports done here due to required initialization of
    # of the configuration module before all others.
    # Initialization is dependent on RHNComponentType in the
    # req object.
    if self.__init:
        from apacheHandler import getComponentType
        # We cannot trust the config files to tell us if we are in the
        # broker or in the redirect because we try to always pass
        # upstream all requests
        componentType = getComponentType(req)
        initCFG(componentType)
        initLOG(CFG.LOG_FILE, CFG.DEBUG)
        log_debug(1, 'New request, component %s' % (componentType, ))
    # Instantiate the handlers
    if HandlerWrap.svrHandlers is None:
        HandlerWrap.svrHandlers = self.get_handler_factory(req)()
    if self.__init:
        # Set the component type
        HandlerWrap.svrHandlers.set_component(componentType)
    try:
        log_setreq(req)
        if hasattr(HandlerWrap.svrHandlers, self.__name):
            f = getattr(HandlerWrap.svrHandlers, self.__name)
            ret = f(req)
        else:
            raise Exception("Class has no attribute %s" % self.__name)
    # pylint: disable=W0702
    except:
        Traceback(self.__name, req,
                  extra="Unhandled exception type",
                  severity="unhandled")
        return apache.HTTP_INTERNAL_SERVER_ERROR
    else:
        return ret
def headerParserHandler(self, req):
    # pylint: disable=W0201
    """Apache header-parser phase for the satellite exporter: init
    config/logging, short-circuit with a system-wide message when in
    outage mode, otherwise connect to the DB and dispatch to the
    configured exporter server class.

    Returns an Apache status code.
    """
    log_setreq(req)
    self.start_time = time.time()
    # init configuration options with proper component
    options = req.get_options()
    # if we are initializing out of a <Location> handler don't
    # freak out
    if "RHNComponentType" not in options:
        # clearly nothing to do
        return apache.OK
    initCFG(options["RHNComponentType"])
    initLOG(CFG.LOG_FILE, CFG.DEBUG)
    # short-circuit everything if sending a system-wide message.
    if CFG.SEND_MESSAGE_TO_ALL:
        # Drop the database connection
        # pylint: disable=W0702
        try:
            rhnSQL.closeDB()
        except:
            pass
        # Fetch global message being sent to clients if applicable.
        msg = open(CFG.MESSAGE_TO_ALL).read()
        log_debug(3, "Sending message to all clients: %s" % msg)
        return self._send_xmlrpc(req,
                                 rhnFault(-1,
                                          _("IMPORTANT MESSAGE FOLLOWS:\n%s") % msg,
                                          explain=0))
    rhnSQL.initDB()
    self.server = options['SERVER']
    self.server_classes = rhnImport.load("satellite_exporter/handlers")
    if self.server not in self.server_classes:
        # XXX do something interesting here
        log_error("Missing server", self.server)
        return apache.HTTP_NOT_FOUND
    return self._wrapper(req, self._headerParserHandler)
def main(options):
    """Drive spacewalk-clone-by-date: validate inputs, build channel-tree
    cloners, optionally create missing channels, then clone errata and
    apply removelists.

    :param options: parsed CLI options (server, credentials, channels,
        date/filters, behavior flags).

    NOTE(review): Python 2 source (print statements).
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # Log the options with the password masked.
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print "Reading repository information."
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print "Using %s." % options.use_update_date

    cloners = []
    needed_channels = []
    errata = None
    if options.errata:
        errata = set(options.errata)
    for channel_list in options.channels:
        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)
        # if cloning specific errata validate that they actually exist
        # in the original channels
        if options.errata:
            for channel in channel_labels:
                channel_errata = set(xmlrpc.list_errata(channel))
                if len(errata - channel_errata) != 0:
                    print ("Error: all errata specified with --errata must "
                           + "exist in every original channel cloned in "
                           + "this operation.")
                    print ("Channel %s does not contain these errata: %s" %
                           (channel, errata - channel_errata))
                    sys.exit(1)
        if options.parents:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date, options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync, errata,
                                            options.parents)
        else:
            tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db,
                                            options.to_date, options.blacklist,
                                            options.removelist,
                                            options.background,
                                            options.security_only,
                                            options.use_update_date,
                                            options.no_errata_sync, errata)
        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s" %
                            ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print "\nBy continuing the following channels will be created: "
        print ", ".join(needed_channels)
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    print "\nBy continuing the following will be cloned:"
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    if total == 0:
        print ("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def __init__(self, no_packages=False, no_errata=False, no_rpms=False,
             no_kickstarts=False, log_level=None):
    """Initialize the CDN sync driver: logging, DB, config, and all the
    JSON mapping files (channel families, definitions, dist mapping,
    content sources, kickstarts).

    :raises CdnMappingsLoadError: when any mapping file cannot be read.
    """
    self.no_packages = no_packages
    self.no_errata = no_errata
    self.no_rpms = no_rpms
    self.no_kickstarts = no_kickstarts
    if log_level is None:
        log_level = 0
    self.log_level = log_level
    CFG.set('DEBUG', log_level)
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Command: %s" % str(sys.argv))
    rhnSQL.initDB()
    initCFG('server.satellite')

    try:
        # Channel families mapping to channels
        with open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r') as f:
            self.families = json.load(f)
        # Channel metadata
        with open(constants.CHANNEL_DEFINITIONS_PATH, 'r') as f:
            self.channel_metadata = json.load(f)
        # Dist/Release channel mapping
        with open(constants.CHANNEL_DIST_MAPPING_PATH, 'r') as f:
            self.channel_dist_mapping = json.load(f)
        # Channel to repositories mapping
        with open(constants.CONTENT_SOURCE_MAPPING_PATH, 'r') as f:
            self.content_source_mapping = json.load(f)
        # Kickstart metadata
        with open(constants.KICKSTART_DEFINITIONS_PATH, 'r') as f:
            self.kickstart_metadata = json.load(f)
        # Channel to kickstart repositories mapping
        with open(constants.KICKSTART_SOURCE_MAPPING_PATH, 'r') as f:
            self.kickstart_source_mapping = json.load(f)
    except IOError:
        # sys.exc_info() is used instead of 'as e' for py2/py3 compatibility.
        e = sys.exc_info()[1]
        log2stderr(0, "ERROR: Problem with loading file: %s" % e)
        raise CdnMappingsLoadError()

    # Map channels to their channel family
    self.channel_to_family = {}
    for family in self.families:
        for channel in self.families[family]['channels']:
            self.channel_to_family[channel] = family

    # Set already synced channels
    h = rhnSQL.prepare("""
        select label from rhnChannel where org_id is null
    """)
    h.execute()
    channels = h.fetchall_dict() or []
    self.synced_channels = [ch['label'] for ch in channels]

    # Set SSL-keys for channel family
    self.family_keys = {}
def __init__(self):
    """Initialize the handler: stderr logging, zeroed timer, and a
    clean per-request state."""
    # Init log to stderr
    initLOG()
    self.start_time = 0
    self._cleanup()
def processCommandline():
    """Parse CLI options for CDN/manifest activation, initialize config
    and logging, and resolve proxy settings.

    Exits on stray positional arguments or on a disconnected-mode
    mismatch; returns the parsed options object.
    """
    options = [
        Option('--sanity-only', action='store_true',
               help="confirm certificate sanity. Does not activate "
                    + "the Red Hat Satellite locally or remotely."),
        Option('--ignore-expiration', action='store_true',
               help='execute regardless of the expiration '
                    + 'of the RHN Certificate (not recommended).'),
        Option('--ignore-version-mismatch', action='store_true',
               help='execute regardless of version '
                    + 'mismatch of existing and new certificate.'),
        Option('-v', '--verbose', action='count',
               help='be verbose '
                    + '(accumulable: -vvv means "be *really* verbose").'),
        Option('--dump-version', action='store',
               help="requested version of XML dump"),
        Option('--manifest', action='store',
               help='the RHSM manifest path/filename to activate for CDN'),
        Option('--rhn-cert', action='store',
               help='this option is deprecated, use --manifest instead'),
        Option('--deactivate', action='store_true',
               help='deactivate CDN-activated Satellite'),
        Option('--disconnected', action='store_true',
               help="activate locally, not subscribe to remote repository"),
        Option('--manifest-info', action='store_true',
               help="show information about currently activated manifest"),
        Option('--manifest-download', action='store_true',
               help="download new manifest from RHSM to temporary location"),
        Option('--manifest-refresh', action='store_true',
               help="download new manifest from RHSM and activate it"),
        Option('--manifest-reconcile-request', action='store_true',
               help="request regeneration of entitlement certificates")
    ]

    parser = OptionParser(option_list=options)
    options, args = parser.parse_args()

    initCFG('server.satellite')
    if options.verbose is None:
        options.verbose = 0
    CFG.set('DEBUG', options.verbose)
    rhnLog.initLOG(LOG_PATH, options.verbose)
    log2disk(0, "Command: %s" % str(sys.argv))

    # we take no extra commandline arguments that are not linked to an option
    if args:
        writeError("These arguments make no sense in this context (try --help): %s" % repr(args))
        sys.exit(1)

    # No need to check further if deactivating
    if options.deactivate:
        return options

    if options.sanity_only:
        options.disconnected = 1

    if options.manifest_refresh:
        options.manifest_download = 1

    if CFG.DISCONNECTED and not options.disconnected:
        # NOTE(review): original multi-line message reconstructed from a
        # whitespace-collapsed source; line breaks assumed.
        msg = """Satellite server has been setup to run in disconnected mode.
Either correct server configuration in /etc/rhn/rhn.conf
or use --disconnected to activate it locally."""
        writeError(msg)
        sys.exit(1)

    options.http_proxy = idn_ascii_to_puny(CFG.HTTP_PROXY)
    options.http_proxy_username = CFG.HTTP_PROXY_USERNAME
    options.http_proxy_password = CFG.HTTP_PROXY_PASSWORD
    log(1, 'HTTP_PROXY: %s' % options.http_proxy)
    log(1, 'HTTP_PROXY_USERNAME: %s' % options.http_proxy_username)
    log(1, 'HTTP_PROXY_PASSWORD: <password>')

    return options
# import sys if len(sys.argv) != 3: print "Usage: %s server_id action_id" % sys.argv[0] sys.exit(1) system_id = sys.argv[1] action_id = sys.argv[2] from spacewalk.common.rhnLog import initLOG from spacewalk.server import rhnSQL from spacewalk.server.action_extra_data import packages initLOG("stderr", 4) rhnSQL.initDB("rhnuser/rhnuser@webdev") try: packages.verify(system_id, action_id, { 'verify_info': [ [['up2date', '2.9.1', '1.2.1AS', '', 'i386'], [ 'SM5..UGT c /etc/sysconfig/rhn/up2date', '..?..... c /etc/sysconfig/rhn/up2date-keyring.gpg', 'S.5....T /usr/share/rhn/up2date_client/bootloadercfg.pyc', 'S.5....T /usr/share/rhn/up2date_client/capabilities.pyc', 'S.5....T /usr/share/rhn/up2date_client/checkbootloader.pyc', 'S.5....T /usr/share/rhn/up2date_client/clap.pyc', 'S.5....T /usr/share/rhn/up2date_client/clientCaps.pyc', 'SM5....T /usr/share/rhn/up2date_client/config.pyc', 'S.5....T /usr/share/rhn/up2date_client/depSolver.pyc',
def __init__(self, channel_label, repo_type, url=None, fail=False,
             filters=None, no_errata=False, sync_kickstart=False, latest=False,
             metadata_only=False, strict=0, excluded_urls=None,
             no_packages=False, log_dir="reposync", log_level=None,
             force_kickstart=False, force_all_errata=False,
             check_ssl_dates=False, force_null_org_content=False):
    """Prepare a repo sync run for one channel.

    Side effects: initializes server config and the DB connection, opens a
    per-channel log file under the reposync log directory (chgrp'd to the
    web-server group), and loads the channel's content-source URLs from the
    DB unless *url* (an iterable of URL strings — presumably; confirm against
    callers) is given explicitly.
    """
    self.regen = False
    self.fail = fail
    self.filters = filters or []
    self.no_packages = no_packages
    self.no_errata = no_errata
    self.sync_kickstart = sync_kickstart
    self.force_all_errata = force_all_errata
    self.force_kickstart = force_kickstart
    self.latest = latest
    self.metadata_only = metadata_only
    self.ks_tree_type = 'externally-managed'
    self.ks_install_type = None

    initCFG('server.satellite')
    rhnSQL.initDB()

    # setup logging
    log_filename = channel_label + '.log'
    log_path = default_log_location + log_dir + '/' + log_filename
    if log_level is None:
        log_level = 0
    CFG.set('DEBUG', log_level)
    rhnLog.initLOG(log_path, log_level)
    # os.fchown isn't in 2.4 :/
    # Log must be readable by the web server user; group differs per distro.
    if isSUSE():
        os.system("chgrp www " + log_path)
    else:
        os.system("chgrp apache " + log_path)

    log2disk(0, "Command: %s" % str(sys.argv))
    log2disk(0, "Sync of channel started.")

    self.channel_label = channel_label
    self.channel = self.load_channel()
    if not self.channel:
        # NOTE(review): execution continues after this log call, and the
        # next statement dereferences self.channel — presumably log() or a
        # caller aborts first, or this is a latent crash; confirm.
        log(0, "Channel %s does not exist." % channel_label)

    if not self.channel['org_id'] or force_null_org_content:
        self.org_id = None
    else:
        self.org_id = int(self.channel['org_id'])

    if not url:
        # TODO:need to look at user security across orgs
        h = rhnSQL.prepare("""select s.id, s.source_url, s.label
                              from rhnContentSource s,
                                   rhnChannelContentSource cs
                             where s.id = cs.source_id
                               and cs.channel_id = :channel_id""")
        h.execute(channel_id=int(self.channel['id']))
        source_data = h.fetchall_dict()
        self.urls = []
        if excluded_urls is None:
            excluded_urls = []
        if source_data:
            for row in source_data:
                if row['source_url'] not in excluded_urls:
                    self.urls.append(
                        (row['id'], row['source_url'], row['label']))
    else:
        # Explicit URLs carry no DB id / label.
        self.urls = [(None, u, None) for u in url]

    if not self.urls:
        log2(0, 0, "Channel %s has no URL associated" % channel_label,
             stream=sys.stderr)

    self.repo_plugin = self.load_plugin(repo_type)
    self.strict = strict
    self.all_packages = []
    self.check_ssl_dates = check_ssl_dates
    # Init cache for computed checksums to not compute it on each reposync run again
    self.checksum_cache = rhnCache.get(checksum_cache_filename)
    if self.checksum_cache is None:
        self.checksum_cache = {}
def __init__(self, no_packages=False, no_errata=False, no_rpms=False,
             no_kickstarts=False, log_level=None, mount_point=None,
             consider_full=False, force_kickstarts=False,
             force_all_errata=False, email=False, import_batch_size=None):
    """Prepare a CDN sync session.

    Side effects (in order): sets DEBUG level, optionally enables e-mail
    logging, opens the disk log (self.log_path — presumably a class
    attribute; confirm), connects to the DB, and initializes the
    'server.satellite' config component.

    Loads the JSON channel/kickstart mapping files; missing files are
    tolerated and replaced by empty mappings. Then queries the DB for
    channels that are already synced or eligible for syncing.
    """
    if log_level is None:
        log_level = 0
    self.log_level = log_level
    CFG.set('DEBUG', log_level)
    self.email = email
    if self.email:
        initEMAIL_LOG()
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Command: %s" % str(sys.argv))
    rhnSQL.initDB()
    initCFG('server.satellite')

    self.cdn_repository_manager = CdnRepositoryManager(mount_point)
    self.no_packages = no_packages
    self.no_errata = no_errata
    self.no_rpms = no_rpms
    # --no-packages already skips rpms, so --no-rpms adds nothing.
    if self.no_packages and self.no_rpms:
        log(0, "Parameter --no-rpms has no effect.")
    self.no_kickstarts = no_kickstarts
    self.force_all_errata = force_all_errata
    self.force_kickstarts = force_kickstarts
    if self.no_kickstarts and self.force_kickstarts:
        log(0, "Parameter --force-kickstarts has no effect.")
    if mount_point:
        self.mount_point = "file://" + mount_point
        self.consider_full = consider_full
    else:
        self.mount_point = CFG.CDN_ROOT
        self.consider_full = True

    verify_mappings()

    f = None
    # try block in try block - this is hack for python 2.4 compatibility
    # to support finally
    try:
        try:
            # Channel families mapping to channels
            f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r')
            self.families = json.load(f)
            f.close()
            # Channel metadata
            f = open(constants.CHANNEL_DEFINITIONS_PATH, 'r')
            self.channel_metadata = json.load(f)
            f.close()
            # Dist/Release channel mapping
            f = open(constants.CHANNEL_DIST_MAPPING_PATH, 'r')
            self.channel_dist_mapping = json.load(f)
            f.close()
            # Kickstart metadata
            f = open(constants.KICKSTART_DEFINITIONS_PATH, 'r')
            self.kickstart_metadata = json.load(f)
            f.close()
        except IOError:
            # Missing mapping files are not fatal; sync proceeds with
            # empty mappings.
            e = sys.exc_info()[1]
            log(1, "Ignoring channel mappings: %s" % e)
            self.families = {}
            self.channel_metadata = {}
            self.channel_dist_mapping = {}
            self.kickstart_metadata = {}
    finally:
        if f is not None:
            f.close()

    # Map channels to their channel family
    self.channel_to_family = {}
    for family in self.families:
        for channel in self.families[family]['channels']:
            self.channel_to_family[channel] = family

    # Set already synced channels, entitled null-org channels and custom channels with associated
    # CDN repositories
    h = rhnSQL.prepare("""
        select distinct c.label, c.org_id
        from rhnChannelFamilyPermissions cfp inner join
             rhnChannelFamily cf on cfp.channel_family_id = cf.id inner join
             rhnChannelFamilyMembers cfm on cf.id = cfm.channel_family_id inner join
             rhnChannel c on cfm.channel_id = c.id
        where c.org_id is null
          or (c.org_id is not null and
              exists (
                  select cs.id
                  from rhnContentSource cs inner join
                       rhnChannelContentSource ccs on ccs.source_id = cs.id
                  where ccs.channel_id = c.id
                    and cs.org_id is null
              )
          )
        order by c.org_id nulls first, label
    """)
    h.execute()
    channels = h.fetchall_dict() or []
    # label -> org_id (None for null-org channels).
    self.synced_channels = {}
    for channel in channels:
        # Custom channel repositories not available, don't mark as synced
        if channel['org_id']:
            repos = self.cdn_repository_manager.list_associated_repos(
                channel['label'])
            if not all([
                    self.cdn_repository_manager.
                    check_repository_availability(r) for r in repos
            ]):
                continue
        self.synced_channels[channel['label']] = channel['org_id']

    # Select available channel families from DB
    h = rhnSQL.prepare("""
        select distinct label
        from rhnChannelFamilyPermissions cfp inner join
             rhnChannelFamily cf on cfp.channel_family_id = cf.id
        where cf.org_id is null
    """)
    h.execute()
    families = h.fetchall_dict() or []
    self.entitled_families = [f['label'] for f in families]
    self.import_batch_size = import_batch_size
def main(options): xmlrpc = RemoteApi(options.server, options.username, options.password) db = DBApi() initCFG('server') rhnLog.initLOG(LOG_LOCATION) cleansed = vars(options) cleansed["password"] = "******" log_clean(0, "") log_debug(0, "Started spacewalk-clone-by-date") log_clean(0, pprint.pformat(cleansed)) print "Reading repository information." if options.use_update_date: options.use_update_date = 'update_date' else: options.use_update_date = 'issue_date' print "Using %s." % options.use_update_date cloners = [] needed_channels = [] errata = None if options.errata: errata = set(options.errata) for channel_list in options.channels: parents = None if options.parents: # if only the dest parent is specified, look up the src parent if len(options.parents) == 1: src_parent = xmlrpc.get_original(options.parents[0]) if not src_parent: print ("Channel %s is not a cloned channel." % options.parents[0]) sys.exit(1) print "Looking up the original channel for %s, %s found" % ( options.parents[0], src_parent) options.parents = [src_parent] + options.parents # options.parents is only set by command line, this must be the # only channel tree parents = options.parents # Handle the new-style channel specification that uses # key value pairs. Transform into channel / parent setup that # ChannelTreeCloner expects. This code has to be here now that you can # specify parents for multiple trees. # TODO: the channel / parents structure needs to be cleaned up throught # clone-by-date. Probably best thing would to make everywhere use the # dict structure instead of the list structure. 
for src_channel in channel_list.keys(): dest_channel = channel_list[src_channel] # new-style config file channel specification if type(dest_channel) == dict: if 'label' not in dest_channel: raise UserError("You must specify a label for the clone of %s" % src_channel) label = dest_channel['label'] if 'name' in dest_channel: name = dest_channel['name'] else: name = label if 'summary' in dest_channel: summary = dest_channel['summary'] else: summary = label if 'description' in dest_channel: description = dest_channel['description'] else: description = label # This is the options.parents equivalent for config files. # Add channels to parents option and remove from channels. if ('existing-parent-do-not-modify' in dest_channel and dest_channel['existing-parent-do-not-modify']): parents = [src_channel, label] del channel_list[src_channel] else: # else tranform channel_list entry to the list format channel_list[src_channel] = [label, name, summary, description] # before we start make sure we can get repodata for all channels # involved. 
channel_labels = channel_list.keys() for label in channel_labels: if not os.path.exists(repodata(label)): raise UserRepoError(label) # ensure the parent's channel metadata is available if parents: for label in parents: if not os.path.exists(repodata(label)): raise UserRepoError(label) # if cloning specific errata validate that they actually exist # in the original channels if options.errata: for channel in channel_labels: channel_errata = set(xmlrpc.list_errata(channel)) if len(errata - channel_errata) != 0: print ("Error: all errata specified with --errata must " + "exist in every original channel cloned in " + "this operation.") print ("Channel %s does not contain these errata: %s" % (channel, errata - channel_errata)) sys.exit(1) tree_cloner = ChannelTreeCloner(channel_list, xmlrpc, db, options.to_date, options.blacklist, options.removelist, options.security_only, options.use_update_date, options.no_errata_sync, errata, parents) cloners.append(tree_cloner) needed_channels += tree_cloner.needing_create().values() if options.validate: if len(needed_channels) > 0: raise UserError("Cannot validate channels that do not exist %s" % ', '.join(map(str, needed_channels))) for channel_list in options.channels: validate(channel_list.values()) return if len(needed_channels) > 0: print "\nBy continuing the following channels will be created: " print ", ".join(needed_channels) confirm("\nContinue with channel creation (y/n)?", options) for cloner in cloners: cloner.create_channels(options.skip_depsolve) for tree_cloner in cloners: tree_cloner.prepare() if options.dry_run: for tree_cloner in cloners: d_errata = {} separator = "|" d_errata = tree_cloner.get_errata_to_clone() now = datetime.datetime.now() for ch in d_errata: log_file = ch + "_" + now.strftime("%Y-%m-%d-%H:%M") print "# Log file: " + log_file fh = open(log_file, 'w') for errata in d_errata[ch]: line = "" for item in list(set(errata) - set(['id'])): line = line + str(errata[item]) + separator fh.write(line + "\n") 
fh.close() sys.exit(0) print "\nBy continuing the following will be cloned:" total = 0 for cloner in cloners: cloner.pre_summary() total += cloner.pending() if total == 0: print ("\nNo errata to clone, checking removelist.") for cloner in cloners: cloner.remove_packages() sys.exit(0) confirm("\nContinue with clone (y/n)?", options) for cloner in cloners: cloner.clone(options.skip_depsolve) cloner.remove_packages()
def main(options):
    """Clone channels (and optionally specific errata) up to a cutoff date.

    Drives a full spacewalk-clone-by-date run: reads channel/parent
    specifications from *options*, validates that repodata and the requested
    errata exist, then creates and populates the clone channels.

    Exits the process on user-visible errors (bad parent, missing errata,
    --dry-run completion); returns None otherwise.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # Log the invocation, but never the real password.
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print "Reading repository information."
    # Map the boolean option onto the errata date column used for filtering.
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print "Using %s." % options.use_update_date

    cloners = []
    needed_channels = []
    errata = None
    if options.errata:
        errata = set(options.errata)
    for channel_list in options.channels:
        parents = None
        if options.parents:
            # if only the dest parent is specified, look up the src parent
            if len(options.parents) == 1:
                src_parent = xmlrpc.get_original(options.parents[0])
                if not src_parent:
                    print("Channel %s is not a cloned channel." % options.parents[0])
                    sys.exit(1)
                print "Looking up the original channel for %s, %s found" % (
                    options.parents[0], src_parent)
                options.parents = [src_parent] + options.parents
            # options.parents is only set by command line, this must be the
            # only channel tree
            parents = options.parents

        # Handle the new-style channel specification that uses
        # key value pairs. Transform into channel / parent setup that
        # ChannelTreeCloner expects. This code has to be here now that you can
        # specify parents for multiple trees.
        # TODO: the channel / parents structure needs to be cleaned up throught
        # clone-by-date. Probably best thing would to make everywhere use the
        # dict structure instead of the list structure.
        for src_channel in channel_list.keys():
            dest_channel = channel_list[src_channel]
            # new-style config file channel specification
            if type(dest_channel) == dict:
                if 'label' not in dest_channel:
                    raise UserError(
                        "You must specify a label for the clone of %s" %
                        src_channel)
                label = dest_channel['label']
                if 'name' in dest_channel:
                    name = dest_channel['name']
                else:
                    name = label
                if 'summary' in dest_channel:
                    summary = dest_channel['summary']
                else:
                    summary = label
                if 'description' in dest_channel:
                    description = dest_channel['description']
                else:
                    description = label
                # This is the options.parents equivalent for config files.
                # Add channels to parents option and remove from channels.
                if ('existing-parent-do-not-modify' in dest_channel
                        and dest_channel['existing-parent-do-not-modify']):
                    parents = [src_channel, label]
                    del channel_list[src_channel]
                else:
                    # else tranform channel_list entry to the list format
                    channel_list[src_channel] = [
                        label, name, summary, description
                    ]

        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)
        # ensure the parent's channel metadata is available
        if parents:
            for label in parents:
                if not os.path.exists(repodata(label)):
                    raise UserRepoError(label)

        # if cloning specific errata validate that they actually exist
        # in the original channels
        if options.errata:
            for channel in channel_labels:
                channel_errata = set(xmlrpc.list_errata(channel))
                if len(errata - channel_errata) != 0:
                    print("Error: all errata specified with --errata must " +
                          "exist in every original channel cloned in " +
                          "this operation.")
                    print("Channel %s does not contain these errata: %s" %
                          (channel, errata - channel_errata))
                    sys.exit(1)

        tree_cloner = ChannelTreeCloner(
            channel_list, xmlrpc, db, options.to_date, options.blacklist,
            options.removelist, options.background, options.security_only,
            options.use_update_date, options.no_errata_sync, errata, parents)

        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s" %
                            ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print "\nBy continuing the following channels will be created: "
        print ", ".join(needed_channels)
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    # --dry-run: dump the errata that would be cloned to per-channel log
    # files, then stop without modifying any channel.
    if options.dry_run:
        for tree_cloner in cloners:
            d_errata = {}
            separator = "|"
            d_errata = tree_cloner.get_errata_to_clone()
            now = datetime.datetime.now()
            for ch in d_errata:
                log_file = ch + "_" + now.strftime("%Y-%m-%d-%H:%M")
                print "# Log file: " + log_file
                fh = open(log_file, 'w')
                for errata in d_errata[ch]:
                    line = ""
                    for item in list(set(errata) - set(['id'])):
                        line = line + str(errata[item]) + separator
                    fh.write(line + "\n")
                fh.close()
        sys.exit(0)

    print "\nBy continuing the following will be cloned:"
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    if total == 0:
        print("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def main(self): initCFG('server') db_string = CFG.DEFAULT_DB #"rhnsat/rhnsat@rhnsat" rhnSQL.initDB(db_string) (options, args) = self.process_args() log_filename = 'reposync.log' if options.channel_label: date = time.localtime() datestr = '%d.%02d.%02d-%02d:%02d:%02d' % (date.tm_year, date.tm_mon, date.tm_mday, date.tm_hour, date.tm_min, date.tm_sec) log_filename = options.channel_label + '-' + datestr + '.log' rhnLog.initLOG(default_log_location + log_filename) #os.fchown isn't in 2.4 :/ os.system("chgrp apache " + default_log_location + log_filename) quit = False if not options.url: if options.channel_label: # TODO:need to look at user security across orgs h = rhnSQL.prepare("""select s.source_url from rhnContentSource s, rhnChannelContentSource cs, rhnChannel c where s.id = cs.source_id and cs.channel_id = c.id and c.label = :label""") h.execute(label=options.channel_label) source_urls = h.fetchall_dict() or [] if source_urls: self.urls = [row['source_url'] for row in source_urls] else: quit = True self.error_msg("Channel has no URL associated") else: self.urls = [options.url] if not options.channel_label: quit = True self.error_msg("--channel must be specified") self.log_msg("\nSync started: %s" % (time.asctime(time.localtime()))) self.log_msg(str(sys.argv)) if quit: sys.exit(1) self.type = options.type self.channel_label = options.channel_label self.fail = options.fail self.quiet = options.quiet self.channel = self.load_channel() if not self.channel or not rhnChannel.isCustomChannel(self.channel['id']): print "Channel does not exist or is not custom" sys.exit(1) for url in self.urls: plugin = self.load_plugin()(url, self.channel_label) self.import_packages(plugin, url) if self.regen: taskomatic.add_to_repodata_queue_for_channel_package_subscription( [self.channel_label], [], "server.app.yumreposync") self.update_date() rhnSQL.commit() self.print_msg("Sync complete")
def __init__(self, debug, logfile):
    """Open the disk log at *logfile* at verbosity level *debug*."""
    verbosity = int(debug)
    rhnLog.initLOG(logfile, verbosity)
import os import shutil from optparse import Option, OptionParser from spacewalk.common.rhnLog import initLOG, rhnLog from spacewalk.common.rhnConfig import CFG, initCFG from spacewalk.common import rhn_rpm from spacewalk.server.rhnLib import parseRPMFilename, get_package_path from spacewalk.server import rhnSQL, rhnPackageUpload from spacewalk.server.rhnServer import server_packages from spacewalk.satellite_tools.progress_bar import ProgressBar from spacewalk.common.checksum import getFileChecksum from spacewalk.server.importlib import mpmSource initCFG('server.satellite') initLOG(CFG.LOG_FILE, CFG.DEBUG) OPTIONS = None debug = 0 verbose = 0 options_table = [ Option("--update-package-files", action="store_true", help="Update package files (bugs #659348, #652852)"), Option("--update-sha256", action="store_true", help="Update SHA-256 capable packages"), Option("--update-filer", action="store_true", help="Convert filer structure"), Option("--update-kstrees", action="store_true", help="Fix kickstart trees permissions"), Option("--update-changelog", action="store_true",
import os import shutil from optparse import Option, OptionParser from spacewalk.common.rhnLog import initLOG, rhnLog from spacewalk.common.rhnConfig import CFG, initCFG from spacewalk.common import rhn_rpm from spacewalk.server.rhnLib import parseRPMFilename, get_package_path from spacewalk.server import rhnSQL, rhnPackageUpload from spacewalk.server.rhnServer import server_packages from spacewalk.satellite_tools.progress_bar import ProgressBar from spacewalk.common.checksum import getFileChecksum from spacewalk.server.importlib import mpmSource initCFG('server.satellite') initLOG(CFG.LOG_FILE, CFG.DEBUG) OPTIONS = None debug = 0 verbose = 0 options_table = [ Option("--update-package-files", action="store_true", help="Update package files (bugs #659348, #652852)"), Option("--update-sha256", action="store_true", help="Update SHA-256 capable packages"), Option("--update-filer", action="store_true", help="Convert filer structure"),
# implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # from spacewalk.server import rhnSQL from spacewalk.common import rhnLog from spacewalk.server.action_extra_data import solarispkgs if __name__ == '__main__': rhnSQL.initDB('satdev/satdev@citisat') rhnLog.initLOG(log_file="stderr", level=5) solarispkgs.install(1000010033, 69, { 'name' : 'XXX', 'version' : 0, 'status' : [ [['SMCsudo', '1.6.7p5', '0', 'sparc-solaris',], [1, "", ""]], [['SFWungif', '4.1.0', '2001.05.21.04.41', 'sparc-solaris',], [1, "out1", "error1"]], [['SMCtracer', '1.4a12', '0', 'sparc-solaris',], [1, "out1", "error1"]], [['SMCrcs', '5.7', '0', 'sparc-solaris',], [2, "out2", "error2"]], [['SMCngrep', '1.40.1', '0', 'sparc-solaris',], [3, "out3", "error3"]],
def sync(self, channels=None):
    """Sync the given channel labels (wildcards allowed) from the CDN.

    With no *channels*, re-syncs everything in self.synced_channels.
    Returns a list of human-readable error messages (empty on full success);
    unavailable channels are reported rather than raised.
    """
    # If no channels specified, sync already synced channels
    if not channels:
        channels = set(self.synced_channels)

    # Check channel availability before doing anything
    not_available = set()
    available = set()
    all_channel_list = None
    for channel in channels:
        # Try to expand wildcards in channel labels
        if '*' in channel or '?' in channel or '[' in channel:
            # Build the expansion universe lazily, only on first wildcard.
            if all_channel_list is None:
                all_channel_list = self._list_available_channels() + [
                    c for c in self.synced_channels if self.synced_channels[c]
                ]
            expanded = fnmatch.filter(all_channel_list, channel)
            log(2, "Expanding channel '%s' to: %s" % (channel,
                                                      ", ".join(expanded)))
            if expanded:
                for expanded_channel in expanded:
                    if not self._is_channel_available(expanded_channel):
                        not_available.add(expanded_channel)
                    else:
                        available.add(expanded_channel)
            else:
                # A pattern matching nothing is itself "not available".
                not_available.add(channel)
        elif not self._is_channel_available(channel):
            not_available.add(channel)
        else:
            available.add(channel)

    channels = available

    error_messages = []

    # if we have not_available channels log the error immediately
    if not_available:
        msg = "ERROR: these channels either do not exist or are not available for synchronization:\n " + \
              "\n ".join(not_available)
        error_messages.append(msg)

    # BZ 1434913 - let user know if system is not activated if no available channels
    if not available:
        error_messages.extend(self._msg_array_if_not_activated())

    # Need to update channel metadata
    self._update_channels_metadata(
        [ch for ch in channels if ch in self.channel_metadata])
    # Make sure custom channels are properly connected with repos
    for channel in channels:
        if channel in self.synced_channels and self.synced_channels[channel]:
            self.cdn_repository_manager.assign_repositories_to_channel(channel)

    reposync.clear_ssl_cache()

    # Finally, sync channel content
    total_time = timedelta()
    for channel in channels:
        # _sync_channel reports failures via its return value:
        # negative -> whole-channel failure, positive -> per-package failures.
        cur_time, failed_packages = self._sync_channel(channel)
        if failed_packages < 0:
            error_messages.append("Problems occurred during syncing channel %s. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n" % (channel,
                                                                                     channel))
        if failed_packages > 0:
            error_messages.append("%d packages in channel %s failed to sync. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n" % (failed_packages,
                                                                                     channel, channel))
        total_time += cur_time

    # Switch back to cdnsync log
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")
    log(0, "Total time: %s" % str(total_time).split('.')[0])

    return error_messages
def processCommandline():
    """Parse command-line options for satellite CDN activation.

    Initializes the 'server.satellite' config component, sets the CFG DEBUG
    level from the -v count, and opens the disk log at LOG_PATH. Exits with
    status 1 on stray positional arguments or when the server is configured
    DISCONNECTED without --disconnected. Returns the parsed options object
    augmented with proxy settings from server configuration.
    """
    options = [
        Option('--sanity-only', action='store_true',
               help="confirm certificate sanity. Does not activate "
                    + "the Red Hat Satellite locally or remotely."),
        Option('--ignore-expiration', action='store_true',
               help='execute regardless of the expiration '
                    + 'of the RHN Certificate (not recommended).'),
        Option('--ignore-version-mismatch', action='store_true',
               help='execute regardless of version '
                    + 'mismatch of existing and new certificate.'),
        Option('-v', '--verbose', action='count',
               help='be verbose '
                    + '(accumulable: -vvv means "be *really* verbose").'),
        Option('--dump-version', action='store',
               help="requested version of XML dump"),
        Option('--manifest', action='store',
               help='the RHSM manifest path/filename to activate for CDN'),
        Option('--rhn-cert', action='store',
               help='this option is deprecated, use --manifest instead'),
        Option('--deactivate', action='store_true',
               help='deactivate CDN-activated Satellite'),
        Option('--disconnected', action='store_true',
               help="activate locally, not subscribe to remote repository"),
        Option('--manifest-info', action='store_true',
               help="show information about currently activated manifest"),
        Option('--manifest-download', action='store_true',
               help="download new manifest from RHSM to temporary location"),
        Option('--manifest-refresh', action='store_true',
               help="download new manifest from RHSM and activate it"),
        Option('--manifest-reconcile-request', action='store_true',
               help="request regeneration of entitlement certificates")
    ]

    parser = OptionParser(option_list=options)
    options, args = parser.parse_args()

    initCFG('server.satellite')
    # -v not given at all yields None; normalize to 0 before using as a level.
    if options.verbose is None:
        options.verbose = 0
    CFG.set('DEBUG', options.verbose)
    rhnLog.initLOG(LOG_PATH, options.verbose)
    log2disk(0, "Command: %s" % str(sys.argv))

    # we take no extra commandline arguments that are not linked to an option
    if args:
        writeError("These arguments make no sense in this context (try --help): %s" % repr(args))
        sys.exit(1)

    # No need to check further if deactivating
    if options.deactivate:
        return options

    if options.sanity_only:
        options.disconnected = 1

    # A refresh implies downloading the new manifest first.
    if options.manifest_refresh:
        options.manifest_download = 1

    if CFG.DISCONNECTED and not options.disconnected:
        msg = """Satellite server has been setup to run in disconnected mode.
Either correct server configuration in /etc/rhn/rhn.conf
or use --disconnected to activate it locally."""
        writeError(msg)
        sys.exit(1)

    # Proxy settings come from server config, not from the command line.
    options.http_proxy = idn_ascii_to_puny(CFG.HTTP_PROXY)
    options.http_proxy_username = CFG.HTTP_PROXY_USERNAME
    options.http_proxy_password = CFG.HTTP_PROXY_PASSWORD
    log(1, 'HTTP_PROXY: %s' % options.http_proxy)
    log(1, 'HTTP_PROXY_USERNAME: %s' % options.http_proxy_username)
    # Deliberately do not log the real proxy password.
    log(1, 'HTTP_PROXY_PASSWORD: <password>')

    return options
""" % epochStatement h = rhnSQL.prepare(statement) h.execute(**params) ret = h.fetchone_dict() if not ret: return {'path': None, 'channel_id': None, 'checksum_type': None, 'checksum': None, } return ret def _none2emptyString(foo): if foo is None: return "" return str(foo) if __name__ == '__main__': """Test code. """ from spacewalk.common.rhnLog import initLOG initLOG("stdout", 1) rhnSQL.initDB() print # new client print get_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-linux-i386-7.1') print get_source_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-linux-i386-7.1')
#!/usr/bin/python2
#
# Copyright (c) 2008--2018 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#

from spacewalk.common import rhnLog
from spacewalk.server import rhnImport

# Exercise rhnImport.load() over the server handler modules.
# Log to stderr at debug level (no log file is configured).
rhnLog.initLOG(level=4)

# Renamed from 'dir' to avoid shadowing the builtin dir().
handlers_dir = "spacewalk/server/handlers"

# Two passes over both interface signatures; the second pass presumably hits
# rhnImport's module cache -- TODO confirm against rhnImport.load().
for i in range(2):
    for iface in ['rpcClasses', 'getHandler']:
        m = rhnImport.load(handlers_dir, interface_signature=iface)
def main(options):
    """Clone channel trees up to a cutoff date (early revision, no per-errata
    selection and no --dry-run support).

    Validates repodata for every involved channel, builds one
    ChannelTreeCloner per channel tree, optionally creates missing clone
    channels, then clones and applies the removelist. Exits the process on
    user-visible errors; returns None.
    """
    xmlrpc = RemoteApi(options.server, options.username, options.password)
    db = DBApi()
    initCFG('server')
    rhnLog.initLOG(LOG_LOCATION)
    # Log the invocation, but never the real password.
    cleansed = vars(options)
    cleansed["password"] = "******"
    log_clean(0, "")
    log_debug(0, "Started spacewalk-clone-by-date")
    log_clean(0, pprint.pformat(cleansed))

    print "Reading repository information."
    # Map the boolean option onto the errata date column used for filtering.
    if options.use_update_date:
        options.use_update_date = 'update_date'
    else:
        options.use_update_date = 'issue_date'
    print "Using %s." % options.use_update_date

    cloners = []
    needed_channels = []
    for channel_list in options.channels:
        # before we start make sure we can get repodata for all channels
        # involved.
        channel_labels = channel_list.keys()
        for label in channel_labels:
            if not os.path.exists(repodata(label)):
                raise UserRepoError(label)

        # Parents are optional in this revision and passed through verbatim.
        if options.parents:
            tree_cloner = ChannelTreeCloner(
                channel_list, xmlrpc, db, options.to_date, options.blacklist,
                options.removelist, options.background, options.security_only,
                options.use_update_date, options.no_errata_sync,
                options.parents)
        else:
            tree_cloner = ChannelTreeCloner(
                channel_list, xmlrpc, db, options.to_date, options.blacklist,
                options.removelist, options.background, options.security_only,
                options.use_update_date, options.no_errata_sync)

        cloners.append(tree_cloner)
        needed_channels += tree_cloner.needing_create().values()

    if options.validate:
        if len(needed_channels) > 0:
            raise UserError("Cannot validate channels that do not exist %s" %
                            ', '.join(map(str, needed_channels)))
        for channel_list in options.channels:
            validate(channel_list.values())
        return

    if len(needed_channels) > 0:
        print "\nBy continuing the following channels will be created: "
        print ", ".join(needed_channels)
        confirm("\nContinue with channel creation (y/n)?", options)
        for cloner in cloners:
            cloner.create_channels(options.skip_depsolve)

    for tree_cloner in cloners:
        tree_cloner.prepare()

    print "\nBy continuing the following will be cloned:"
    total = 0
    for cloner in cloners:
        cloner.pre_summary()
        total += cloner.pending()

    if total == 0:
        print("\nNo errata to clone, checking removelist.")
        for cloner in cloners:
            cloner.remove_packages()
        sys.exit(0)

    confirm("\nContinue with clone (y/n)?", options)
    for cloner in cloners:
        cloner.clone(options.skip_depsolve)
        cloner.remove_packages()
def __init__(self, no_packages=False, no_errata=False, no_rpms=False, no_kickstarts=False, log_level=None, mount_point=None, consider_full=False): self.cdn_repository_manager = CdnRepositoryManager(mount_point) self.no_packages = no_packages self.no_errata = no_errata self.no_rpms = no_rpms self.no_kickstarts = no_kickstarts if log_level is None: log_level = 0 self.log_level = log_level if mount_point: self.mount_point = "file://" + mount_point self.consider_full = consider_full else: self.mount_point = CFG.CDN_ROOT self.consider_full = True CFG.set('DEBUG', log_level) rhnLog.initLOG(self.log_path, self.log_level) log2disk(0, "Command: %s" % str(sys.argv)) rhnSQL.initDB() initCFG('server.satellite') verify_mappings() f = None # try block in try block - this is hack for python 2.4 compatibility # to support finally try: try: # Channel families mapping to channels f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r') self.families = json.load(f) f.close() # Channel metadata f = open(constants.CHANNEL_DEFINITIONS_PATH, 'r') self.channel_metadata = json.load(f) f.close() # Dist/Release channel mapping f = open(constants.CHANNEL_DIST_MAPPING_PATH, 'r') self.channel_dist_mapping = json.load(f) f.close() # Kickstart metadata f = open(constants.KICKSTART_DEFINITIONS_PATH, 'r') self.kickstart_metadata = json.load(f) f.close() except IOError: e = sys.exc_info()[1] raise CdnMappingsLoadError("Problem with loading file: %s" % e) finally: if f is not None: f.close() # Map channels to their channel family self.channel_to_family = {} for family in self.families: for channel in self.families[family]['channels']: self.channel_to_family[channel] = family # Set already synced channels h = rhnSQL.prepare(""" select label from rhnChannel where org_id is null """) h.execute() channels = h.fetchall_dict() or [] self.synced_channels = [ch['label'] for ch in channels]
p.id desc """ % (epochStatement, orgStatement) h = rhnSQL.prepare(statement) h.execute(**params) ret = h.fetchone_dict() return ret def _none2emptyString(foo): if foo is None: return "" return str(foo) if __name__ == '__main__': """Test code. """ from spacewalk.common.rhnLog import initLOG initLOG("stdout", 1) rhnSQL.initDB() print("") # new client print( get_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-linux-i386-7.1')) print( get_source_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-linux-i386-7.1'))
def __init__(self, channel_label, repo_type, url=None, fail=False, filters=None, no_errata=False, sync_kickstart=False, latest=False, metadata_only=False, strict=0, excluded_urls=None, no_packages=False, log_dir="reposync", log_level=None): self.regen = False self.fail = fail self.filters = filters or [] self.no_packages = no_packages self.no_errata = no_errata self.sync_kickstart = sync_kickstart self.latest = latest self.metadata_only = metadata_only self.ks_tree_type = 'externally-managed' self.ks_install_type = 'generic_rpm' initCFG('server.satellite') rhnSQL.initDB() # setup logging log_filename = channel_label + '.log' log_path = default_log_location + log_dir + '/' + log_filename if log_level is None: log_level = 0 CFG.set('DEBUG', log_level) rhnLog.initLOG(log_path, log_level) # os.fchown isn't in 2.4 :/ if isSUSE(): os.system("chgrp www " + log_path) else: os.system("chgrp apache " + log_path) log2disk(0, "Command: %s" % str(sys.argv)) log2disk(0, "Sync of channel started.") self.channel_label = channel_label self.channel = self.load_channel() if not self.channel: log(0, "Channel %s does not exist." % channel_label) if not url: # TODO:need to look at user security across orgs h = rhnSQL.prepare("""select s.id, s.source_url, s.label, fm.channel_family_id from rhnContentSource s, rhnChannelContentSource cs, rhnChannelFamilyMembers fm where s.id = cs.source_id and cs.channel_id = fm.channel_id and cs.channel_id = :channel_id""") h.execute(channel_id=int(self.channel['id'])) source_data = h.fetchall_dict() self.urls = [] if excluded_urls is None: excluded_urls = [] if source_data: for row in source_data: if row['source_url'] not in excluded_urls: self.urls.append((row['id'], row['source_url'], row['label'], row['channel_family_id'])) else: self.urls = [(None, u, None, None) for u in url] if not self.urls: log2stderr(0, "Channel %s has no URL associated" % channel_label) self.repo_plugin = self.load_plugin(repo_type) self.strict = strict self.all_packages = []
def __init__(self, no_packages=False, no_errata=False, no_rpms=False, no_kickstarts=False,
             log_level=None, mount_point=None, consider_full=False, force_kickstarts=False,
             force_all_errata=False, email=False, import_batch_size=None):
    """Initialize CDN sync state.

    Sets up logging (optionally mirrored to email), DB and config, loads
    the JSON channel/kickstart mapping files (tolerating their absence),
    and computes which channels are already synced and which channel
    families this system is entitled to.

    :param no_packages:      skip package metadata during sync
    :param no_errata:        skip errata during sync
    :param no_rpms:          skip downloading RPM files (ignored with no_packages)
    :param no_kickstarts:    skip kickstart trees
    :param log_level:        debug level; defaults to 0 when None
    :param mount_point:      local CDN dump path; when unset, CFG.CDN_ROOT is used
    :param consider_full:    treat a local mount point as a full dump
    :param force_kickstarts: re-import kickstarts (ignored with no_kickstarts)
    :param force_all_errata: re-import all errata
    :param email:            also collect log output for an email report
    :param import_batch_size: batch size for the package import
    """
    if log_level is None:
        log_level = 0
    self.log_level = log_level
    CFG.set('DEBUG', log_level)
    self.email = email
    if self.email:
        initEMAIL_LOG()
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Command: %s" % str(sys.argv))
    rhnSQL.initDB()
    initCFG('server.satellite')

    self.cdn_repository_manager = CdnRepositoryManager(mount_point)
    self.no_packages = no_packages
    self.no_errata = no_errata
    self.no_rpms = no_rpms
    # Warn about flag combinations where one option masks the other.
    if self.no_packages and self.no_rpms:
        log(0, "Parameter --no-rpms has no effect.")
    self.no_kickstarts = no_kickstarts
    self.force_all_errata = force_all_errata
    self.force_kickstarts = force_kickstarts
    if self.no_kickstarts and self.force_kickstarts:
        log(0, "Parameter --force-kickstarts has no effect.")
    if mount_point:
        # Local dump: honor the caller's consider_full flag.
        self.mount_point = "file://" + mount_point
        self.consider_full = consider_full
    else:
        # Syncing straight from the CDN root is always a full view.
        self.mount_point = CFG.CDN_ROOT
        self.consider_full = True

    verify_mappings()

    f = None
    # try block in try block - this is hack for python 2.4 compatibility
    # to support finally
    try:
        try:
            # Channel families mapping to channels
            f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r')
            self.families = json.load(f)
            f.close()

            # Channel metadata
            f = open(constants.CHANNEL_DEFINITIONS_PATH, 'r')
            self.channel_metadata = json.load(f)
            f.close()

            # Dist/Release channel mapping
            f = open(constants.CHANNEL_DIST_MAPPING_PATH, 'r')
            self.channel_dist_mapping = json.load(f)
            f.close()

            # Kickstart metadata
            f = open(constants.KICKSTART_DEFINITIONS_PATH, 'r')
            self.kickstart_metadata = json.load(f)
            f.close()
        except IOError:
            # Mappings are optional here: fall back to empty mappings
            # instead of aborting (unlike older variants that raised).
            e = sys.exc_info()[1]
            log(1, "Ignoring channel mappings: %s" % e)
            self.families = {}
            self.channel_metadata = {}
            self.channel_dist_mapping = {}
            self.kickstart_metadata = {}
    finally:
        # Close whichever file was open when an error interrupted the chain.
        if f is not None:
            f.close()

    # Map channels to their channel family (invert the families mapping).
    self.channel_to_family = {}
    for family in self.families:
        for channel in self.families[family]['channels']:
            self.channel_to_family[channel] = family

    # Set already synced channels, entitled null-org channels and custom channels with associated
    # CDN repositories
    h = rhnSQL.prepare("""
        select distinct c.label, c.org_id
        from rhnChannelFamilyPermissions cfp
        inner join rhnChannelFamily cf on cfp.channel_family_id = cf.id
        inner join rhnChannelFamilyMembers cfm on cf.id = cfm.channel_family_id
        inner join rhnChannel c on cfm.channel_id = c.id
        where c.org_id is null
          or (c.org_id is not null and
              exists (
                      select cs.id
                      from rhnContentSource cs
                      inner join rhnChannelContentSource ccs on ccs.source_id = cs.id
                      where ccs.channel_id = c.id
                        and cs.org_id is null
                     )
             )
        order by c.org_id nulls first, label
    """)
    h.execute()
    channels = h.fetchall_dict() or []
    # Maps channel label -> org_id (None for Red Hat null-org channels).
    self.synced_channels = {}
    for channel in channels:
        # Custom channel repositories not available, don't mark as synced
        if channel['org_id']:
            repos = self.cdn_repository_manager.list_associated_repos(channel['label'])
            if not all([self.cdn_repository_manager.check_repository_availability(r)
                        for r in repos]):
                continue
        self.synced_channels[channel['label']] = channel['org_id']

    # Select available channel families from DB
    h = rhnSQL.prepare("""
        select distinct label
        from rhnChannelFamilyPermissions cfp
        inner join rhnChannelFamily cf on cfp.channel_family_id = cf.id
        where cf.org_id is null
    """)
    h.execute()
    families = h.fetchall_dict() or []
    self.entitled_families = [f['label'] for f in families]
    self.import_batch_size = import_batch_size
def __init__(self, no_packages=False, no_errata=False, no_rpms=False, no_kickstarts=False, log_level=None): self.no_packages = no_packages self.no_errata = no_errata self.no_rpms = no_rpms self.no_kickstarts = no_kickstarts if log_level is None: log_level = 0 self.log_level = log_level CFG.set('DEBUG', log_level) rhnLog.initLOG(self.log_path, self.log_level) log2disk(0, "Command: %s" % str(sys.argv)) rhnSQL.initDB() initCFG('server.satellite') f = None # try block in try block - this is hack for python 2.4 compatibility # to support finally try: try: # Channel families mapping to channels f = open(constants.CHANNEL_FAMILY_MAPPING_PATH, 'r') self.families = json.load(f) f.close() # Channel metadata f = open(constants.CHANNEL_DEFINITIONS_PATH, 'r') self.channel_metadata = json.load(f) f.close() # Dist/Release channel mapping f = open(constants.CHANNEL_DIST_MAPPING_PATH, 'r') self.channel_dist_mapping = json.load(f) f.close() # Channel to repositories mapping f = open(constants.CONTENT_SOURCE_MAPPING_PATH, 'r') self.content_source_mapping = json.load(f) f.close() # Kickstart metadata f = open(constants.KICKSTART_DEFINITIONS_PATH, 'r') self.kickstart_metadata = json.load(f) f.close() # Channel to kickstart repositories mapping f = open(constants.KICKSTART_SOURCE_MAPPING_PATH, 'r') self.kickstart_source_mapping = json.load(f) f.close() except IOError: e = sys.exc_info()[1] log2stderr(0, "ERROR: Problem with loading file: %s" % e) raise CdnMappingsLoadError() finally: if f is not None: f.close() # Map channels to their channel family self.channel_to_family = {} for family in self.families: for channel in self.families[family]['channels']: self.channel_to_family[channel] = family # Set already synced channels h = rhnSQL.prepare(""" select label from rhnChannel where org_id is null """) h.execute() channels = h.fetchall_dict() or [] self.synced_channels = [ch['label'] for ch in channels] # Set SSL-keys for channel family self.family_keys = {}
def sync(self, channels=None):
    """Synchronize channel content from the CDN.

    Expands glob patterns in channel labels, partitions the requested
    channels into available/unavailable, refreshes channel metadata,
    attaches CDN repositories to custom channels, and syncs each channel
    in turn.

    :param channels: iterable of channel labels (may contain fnmatch
                     globs); when empty/None, all already-synced channels
                     are re-synced
    :return: list of human-readable error message strings (empty on success)
    """
    # Default to everything that was synced before.
    channels = channels or set(self.synced_channels)

    not_available = set()
    available = set()
    # Lazily-built pool of channels that glob patterns may match against:
    # all available channels plus previously synced custom (org) channels.
    all_channel_list = None
    for label in channels:
        if any(g in label for g in ('*', '?', '[')):
            # Glob pattern: expand it against the channel pool.
            if all_channel_list is None:
                all_channel_list = self._list_available_channels() + \
                    [c for c in self.synced_channels if self.synced_channels[c]]
            matches = fnmatch.filter(all_channel_list, label)
            log(2, "Expanding channel '%s' to: %s" % (label, ", ".join(matches)))
            if not matches:
                # A pattern matching nothing counts as an unavailable channel.
                not_available.add(label)
            else:
                for match in matches:
                    target = available if self._is_channel_available(match) else not_available
                    target.add(match)
        elif self._is_channel_available(label):
            available.add(label)
        else:
            not_available.add(label)

    channels = available
    error_messages = []

    # Report unavailable channels immediately, but keep syncing the rest.
    if not_available:
        error_messages.append(
            "ERROR: these channels either do not exist or are not available for synchronization:\n " +
            "\n ".join(not_available))

    # BZ 1434913 - when nothing at all is available, tell the user if the
    # system is simply not activated.
    if not available:
        error_messages.extend(self._msg_array_if_not_activated())

    # Refresh metadata only for channels we have definitions for.
    self._update_channels_metadata([c for c in channels if c in self.channel_metadata])
    # Custom (org) channels must have their CDN repositories attached
    # before content can flow.
    for label in channels:
        if self.synced_channels.get(label):
            self.cdn_repository_manager.assign_repositories_to_channel(label)
    reposync.clear_ssl_cache()

    # Sync each channel, accumulating elapsed time and per-channel errors.
    total_time = timedelta()
    for label in channels:
        elapsed, failed = self._sync_channel(label)
        if failed < 0:
            # Negative count signals the whole channel sync blew up.
            error_messages.append("Problems occurred during syncing channel %s. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n"
                                  % (label, label))
        if failed > 0:
            error_messages.append("%d packages in channel %s failed to sync. Please check "
                                  "/var/log/rhn/cdnsync/%s.log for the details\n"
                                  % (failed, label, label))
        total_time += elapsed

    # Per-channel syncs redirect logging; point it back at the main log.
    rhnLog.initLOG(self.log_path, self.log_level)
    log2disk(0, "Sync of channel completed.")

    log(0, "Total time: %s" % str(total_time).split('.')[0])

    return error_messages