def submit(self):
    """Resolve the batch's packages in the DB, subscribe them to their
    channels, refresh the newest-package cache and commit.

    Rolls back the backend transaction (and re-raises) if the channel
    subscription fails part-way.
    """
    self.backend.lookupPackages(self.batch, self.checksums)
    try:
        affected_channels = self.backend.subscribeToChannels(
            self.batch, strict=self._strict_subscription)
    except:
        # Undo the partial subscription before propagating the error.
        self.backend.rollback()
        raise
    self.compute_affected_channels(affected_channels)
    if len(self.batch) < 10:
        # update small batch per package
        name_ids = [pkg['name_id'] for pkg in self.batch]
    else:
        # update bigger batch at once
        name_ids = []
    self.backend.update_newest_package_cache(
        caller=self.caller,
        affected_channels=self.affected_channel_packages,
        name_ids=name_ids)
    # Now that channel is updated, schedule the repo generation
    if self.repogen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            self.affected_channels, self.batch, self.caller)
    self.backend.commit()
def subscribeToChannels(self):
    """Subscribe the batch's packages to their channels, refresh the
    newest-package cache, queue repodata regeneration and commit."""
    channel_subscriptions = self.backend.subscribeToChannels(self.batch)
    # Record which channels were touched by this subscription run.
    self.compute_affected_channels(channel_subscriptions)
    self.backend.update_newest_package_cache(
        caller=self.caller,
        affected_channels=self.affected_channel_packages)
    taskomatic.add_to_repodata_queue_for_channel_package_subscription(
        self.affected_channels, self.batch, self.caller)
    self.backend.commit()
def sync(self):
    """Trigger a reposync.

    Iterates over every configured repository URL, imports packages,
    groups, errata and (optionally) kickstart trees, then queues
    repodata/erratacache regeneration and commits.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label, channel_family_id) in self.urls:
        print("")
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # Load the SSL key material tied to this content source (or
                # its channel family) so the plugin can authenticate.
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                      from rhncontentssl
                      join rhncryptokey k1
                        on rhncontentssl.ssl_ca_cert_id = k1.id
                      left outer join rhncryptokey k2
                        on rhncontentssl.ssl_client_cert_id = k2.id
                      left outer join rhncryptokey k3
                        on rhncontentssl.ssl_client_key_id = k3.id
                     where rhncontentssl.content_source_id = :repo_id
                        or rhncontentssl.channel_family_id = :channel_family_id
                    """, repo_id=int(repo_id), channel_family_id=int(channel_family_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    # Kickstart import failed half-way: roll back, re-raise.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # Per-repo failures are logged and the loop continues.
            e = sys.exc_info()[1]
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    total_time = datetime.now() - start_time
    self.print_msg("Sync completed.")
    self.print_msg("Total time: %s" % str(total_time).split('.')[0])
def sync(self):
    """Trigger a reposync.

    For each configured repository URL: import packages, groups, errata
    and (optionally) kickstart trees, then queue repodata/erratacache
    regeneration and commit.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        print()
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL certificates/keys attached to this content source.
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                      from rhncontentsourcessl
                      join rhncryptokey k1
                        on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                      left outer join rhncryptokey k2
                        on rhncontentsourcessl.ssl_client_cert_id = k2.id
                      left outer join rhncryptokey k3
                        on rhncontentsourcessl.ssl_client_key_id = k3.id
                     where rhncontentsourcessl.content_source_id = :repo_id
                    """, repo_id=int(repo_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    # Roll back the partial kickstart import, then re-raise.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # A failing repo is logged; the remaining repos still sync.
            e = sys.exc_info()[1]
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    total_time = datetime.now() - start_time
    self.print_msg("Sync completed.")
    self.print_msg("Total time: %s" % str(total_time).split('.')[0])
def subscribeToChannels(self):
    """Subscribe the batch to its channels, rebuild the newest-package
    cache for every package name in the batch, queue repodata
    regeneration and commit."""
    touched = self.backend.subscribeToChannels(self.batch)
    # Record which channels this run affected.
    self.compute_affected_channels(touched)
    package_name_ids = [entry['name_id'] for entry in self.batch]
    self.backend.update_newest_package_cache(
        caller=self.caller,
        affected_channels=self.affected_channel_packages,
        name_ids=package_name_ids)
    taskomatic.add_to_repodata_queue_for_channel_package_subscription(
        self.affected_channels, self.batch, self.caller)
    self.backend.commit()
def uploadPackages(info, source=0, force=0, caller=None):
    """Import a batch of packages into the database.

    :param info: dict with keys ``packages`` (required, non-empty),
        ``orgId`` and - for binary packages - ``channels``.
    :param source: truthy when uploading source packages; source packages
        are never subscribed to channels.
    :param force: upload-force flag forwarded to the importer.
    :param caller: caller identification, forwarded to the importer and
        the taskomatic queues.
    :returns: tuple ``(already-uploaded status, new-package status)``.
    :raises Exception: when there is nothing to do or the configured
        database backend is not supported.
    """
    log_debug(4, source, force, caller)
    batch = Collection()
    packageList = info.get("packages") or []
    if not packageList:
        raise Exception("Nothing to do")

    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    if source:
        channelList = []
    else:
        channelList = info.get("channels") or []

    for package in packageList:
        p = __processPackage(package, org_id, channelList, source)
        batch.append(p)

    # Pick the backend matching the configured database.  Previously an
    # unrecognized CFG.DB_BACKEND fell through, leaving 'backend' unbound
    # and raising a confusing NameError below; fail explicitly instead.
    if CFG.DB_BACKEND == ORACLE:
        from spacewalk.server.importlib.backendOracle import OracleBackend
        backend = OracleBackend()
    elif CFG.DB_BACKEND == POSTGRESQL:
        from spacewalk.server.importlib.backendOracle import PostgresqlBackend
        backend = PostgresqlBackend()
    else:
        raise Exception("Unsupported database backend: %s" % CFG.DB_BACKEND)
    backend.init()

    importer = packageImporter(batch, backend, source, caller=caller)
    importer.setUploadForce(force)
    importer.run()
    if not source:
        importer.subscribeToChannels()

    # Split the result in two lists - already uploaded and new packages
    newpkgs = []
    uploaded = []
    for pkg in importer.status():
        if pkg.ignored or pkg.diff:
            uploaded.append(pkg)
        else:
            newpkgs.append(pkg)

    # Schedule an errata cache update only if we touched the channels
    if not source:
        # makes sense only for binary packages
        schedule_errata_cache_update(importer.affected_channels)
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            importer.affected_channels, batch, caller)

    rhnSQL.commit()
    return _formatStatus(uploaded), _formatStatus(newpkgs)
def submit(self):
    """Resolve the batch against the database, subscribe it to channels,
    refresh the newest-package cache and commit the transaction."""
    self.backend.lookupPackages(self.batch, self.checksums)
    try:
        subscribed = self.backend.subscribeToChannels(
            self.batch, strict=self._strict_subscription)
    except:
        # Undo the partial subscription, then let the caller see the error.
        self.backend.rollback()
        raise
    self.compute_affected_channels(subscribed)
    self.backend.update_newest_package_cache(
        caller=self.caller,
        affected_channels=self.affected_channel_packages)
    # The channel content changed, so queue a repodata regeneration.
    if self.repogen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            self.affected_channels, self.batch, self.caller)
    self.backend.commit()
def uploadPackages(info, source=0, force=0, caller=None):
    """Import a batch of packages and, for binary packages, subscribe
    them to their channels.

    Returns two status lists: (already-uploaded packages, newly imported
    packages)."""
    log_debug(4, source, force, caller)
    package_list = info.get("packages") or []
    if not package_list:
        raise Exception("Nothing to do")

    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    # Source packages are never subscribed to channels.
    channel_list = [] if source else (info.get("channels") or [])

    batch = Collection()
    for package in package_list:
        batch.append(__processPackage(package, org_id, channel_list, source))

    backend = SQLBackend()
    importer = packageImporter(batch, backend, source, caller=caller)
    importer.setUploadForce(force)
    importer.run()
    if not source:
        importer.subscribeToChannels()

    # Partition the results: ignored/diffed packages were already uploaded.
    uploaded, newpkgs = [], []
    for pkg in importer.status():
        (uploaded if pkg.ignored or pkg.diff else newpkgs).append(pkg)

    # Schedule an errata cache update only if we touched the channels
    if not source:
        # makes sense only for binary packages
        schedule_errata_cache_update(importer.affected_channels)
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            importer.affected_channels, batch, caller)

    rhnSQL.commit()
    return _formatStatus(uploaded), _formatStatus(newpkgs)
def submit(self):
    """Look up the batch, subscribe it to channels, refresh the
    newest-package cache and commit; rolls back on subscription failure."""
    self.backend.lookupPackages(self.batch, self.checksums)
    try:
        touched = self.backend.subscribeToChannels(
            self.batch, strict=self._strict_subscription)
    except:
        # Subscription failed half-way: undo before re-raising.
        self.backend.rollback()
        raise
    self.compute_affected_channels(touched)
    # Small batches refresh the cache per package name; big ones all at once.
    small_batch = len(self.batch) < 10
    refresh_name_ids = [entry['name_id'] for entry in self.batch] if small_batch else []
    self.backend.update_newest_package_cache(
        caller=self.caller,
        affected_channels=self.affected_channel_packages,
        name_ids=refresh_name_ids)
    # Channel content changed - queue repodata regeneration.
    if self.repogen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            self.affected_channels, self.batch, self.caller)
    self.backend.commit()
def sync(self, update_repodata=True):
    """Trigger a reposync.

    Returns (elapsed_time, sync_error) where sync_error is 0 on full
    success, -1 on a global failure (no URLs / a repo-level exception),
    or the count of packages that failed to import.
    """
    failed_packages = 0
    sync_error = 0
    if not self.urls:
        sync_error = -1
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            if repo_label:
                repo_name = repo_label
            else:
                # use modified relative_url as name of repo plugin, because
                # it used as name of cache directory as well
                relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                repo_name = relative_url.replace("?", "_").replace(
                    "&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, repo_name,
                                      org=str(self.org_id or ''),
                                      channel_label=self.channel_label)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                # All SSL cert/key sets attached to this content source.
                keys = rhnSQL.fetchall_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                      from rhncontentsource cs
                     inner join rhncontentsourcessl csssl on cs.id = csssl.content_source_id
                     inner join rhncryptokey k1 on csssl.ssl_ca_cert_id = k1.id
                      left outer join rhncryptokey k2 on csssl.ssl_client_cert_id = k2.id
                      left outer join rhncryptokey k3 on csssl.ssl_client_key_id = k3.id
                     where cs.id = :repo_id
                    """, repo_id=int(repo_id))
                if keys:
                    ssl_set = get_single_ssl_set(
                        keys, check_dates=self.check_ssl_dates)
                    if ssl_set:
                        plugin.set_ssl_options(ssl_set['ca_cert'],
                                               ssl_set['client_cert'],
                                               ssl_set['client_key'])
                    else:
                        raise ValueError(
                            "No valid SSL certificates were found for repository."
                        )
            if not self.no_packages:
                ret = self.import_packages(plugin, repo_id, url)
                failed_packages += ret
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # Roll back the partial kickstart import, then re-raise.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # Per-repo failure: log to both streams, mark global error,
            # keep syncing the remaining repos.
            e = sys.exc_info()[1]
            log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
            log2disk(0, "ERROR: %s" % e)
            # pylint: disable=W0104
            sync_error = -1
        if plugin is not None:
            plugin.clear_ssl_cache()
    # Update cache with package checksums
    rhnCache.set(checksum_cache_filename, self.checksum_cache)
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    # update permissions
    fileutils.createPath(os.path.join(
        CFG.MOUNT_POINT, 'rhn'))  # if the directory exists update ownership only
    for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
        for d in dirs:
            fileutils.setPermsPath(os.path.join(root, d), group='apache')
        for f in files:
            fileutils.setPermsPath(os.path.join(root, f), group='apache')
    elapsed_time = datetime.now() - start_time
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    # if there is no global problems, but some packages weren't synced
    if sync_error == 0 and failed_packages > 0:
        sync_error = failed_packages
    return elapsed_time, sync_error
def sync(self, update_repodata=False):
    """Trigger a reposync.

    Returns the elapsed wall-clock time of the whole sync run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label, channel_family_id) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            # use modified relative_url as name of repo plugin, because
            # it used as name of cache directory as well
            relative_url = '_'.join(url.split('://')[1].split('/')[1:])
            plugin_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, plugin_name)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                # SSL key material tied to this content source or its
                # channel family.
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                      from rhncontentssl
                      join rhncryptokey k1
                        on rhncontentssl.ssl_ca_cert_id = k1.id
                      left outer join rhncryptokey k2
                        on rhncontentssl.ssl_client_cert_id = k2.id
                      left outer join rhncryptokey k3
                        on rhncontentssl.ssl_client_key_id = k3.id
                     where rhncontentssl.content_source_id = :repo_id
                        or rhncontentssl.channel_family_id = :channel_family_id
                    """, repo_id=int(repo_id), channel_family_id=int(channel_family_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            if not self.no_packages:
                self.import_packages(plugin, repo_id, url)
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # Roll back the partial kickstart import, then re-raise.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # Per-repo failures are logged; remaining repos still sync.
            e = sys.exc_info()[1]
            log2stderr(0, "ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    elapsed_time = datetime.now() - start_time
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    return elapsed_time
class RepoSync(object):
    """Synchronize a custom channel from one or more external repositories.

    NOTE(review): this variant uses Python 2 syntax (print statement,
    ``except Exception, e``, ``dict.has_key``) - keep it on Python 2.
    """

    def __init__(self, channel_label, repo_type, url=None, fail=False,
                 quiet=False, filters=None, no_errata=False,
                 sync_kickstart=False, latest=False):
        """Set up DB access, logging and the list of repo URLs to sync.

        Exits the process (sys.exit(1)) when the channel does not exist,
        is not custom, or has no associated URL.
        """
        self.regen = False
        self.fail = fail
        self.quiet = quiet
        self.filters = filters or []
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.latest = latest
        initCFG('server')
        rhnSQL.initDB()
        # setup logging
        log_filename = channel_label + '.log'
        rhnLog.initLOG(default_log_location + log_filename)
        # os.fchown isn't in 2.4 :/
        os.system("chgrp apache " + default_log_location + log_filename)
        self.log_msg("\nSync started: %s" % (time.asctime(time.localtime())))
        self.log_msg(str(sys.argv))
        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel or not rhnChannel.isCustomChannel(
                self.channel['id']):
            self.print_msg("Channel does not exist or is not custom.")
            sys.exit(1)
        if not url:
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label
                                    from rhnContentSource s,
                                         rhnChannelContentSource cs
                                   where s.id = cs.source_id
                                     and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            if source_data:
                self.urls = [(row['id'], row['source_url'], row['label'])
                             for row in source_data]
            else:
                self.error_msg("Channel has no URL associated")
                sys.exit(1)
        else:
            # Explicit URLs from the caller carry no repo id or label.
            self.urls = [(None, u, None) for u in url]
        self.repo_plugin = self.load_plugin(repo_type)

    def sync(self):
        """Trigger a reposync"""
        start_time = datetime.now()
        for (repo_id, url, repo_label) in self.urls:
            print
            self.print_msg("Repo URL: %s" % url)
            plugin = None
            # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
            if url.startswith("uln://"):
                self.repo_plugin = self.load_plugin("uln")
            # pylint: disable=W0703
            try:
                plugin = self.repo_plugin(url, self.channel_label)
                if repo_id is not None:
                    # SSL certificates/keys attached to this content source.
                    keys = rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_key
                        """.replace("client_key", "client_cert, k3.key as client_key") + """
                        """) if False else rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                          from rhncontentsourcessl
                          join rhncryptokey k1
                            on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                          left outer join rhncryptokey k2
                            on rhncontentsourcessl.ssl_client_cert_id = k2.id
                          left outer join rhncryptokey k3
                            on rhncontentsourcessl.ssl_client_key_id = k3.id
                         where rhncontentsourcessl.content_source_id = :repo_id
                        """, repo_id=int(repo_id))
                    if keys and keys.has_key('ca_cert'):
                        plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
                self.import_packages(plugin, repo_id, url)
                self.import_groups(plugin, url)
                if not self.no_errata:
                    self.import_updates(plugin, url)
                # only for repos obtained from the DB
                if self.sync_kickstart and repo_label:
                    try:
                        self.import_kickstart(plugin, url, repo_label)
                    except:
                        # Roll back partial kickstart import, re-raise.
                        rhnSQL.rollback()
                        raise
            except Exception, e:
                self.error_msg("ERROR: %s" % e)
            if plugin is not None:
                plugin.clear_ssl_cache()
        if self.regen:
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()
        total_time = datetime.now() - start_time
        self.print_msg("Sync completed.")
        self.print_msg("Total time: %s" % str(total_time).split('.')[0])
def sync(self, update_repodata=False):
    """Trigger a reposync.

    Returns the elapsed wall-clock time of the whole sync run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            # use modified relative_url as name of repo plugin, because
            # it used as name of cache directory as well
            relative_url = '_'.join(url.split('://')[1].split('/')[1:])
            plugin_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, plugin_name)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                # SSL certificates/keys stored directly on the content source.
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                      from rhncontentsource cs
                      join rhncryptokey k1
                        on cs.ssl_ca_cert_id = k1.id
                      left outer join rhncryptokey k2
                        on cs.ssl_client_cert_id = k2.id
                      left outer join rhncryptokey k3
                        on cs.ssl_client_key_id = k3.id
                     where cs.id = :repo_id
                    """, repo_id=int(repo_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            if not self.no_packages:
                self.import_packages(plugin, repo_id, url)
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # Roll back the partial kickstart import, then re-raise.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # Per-repo failures are logged; remaining repos still sync.
            e = sys.exc_info()[1]
            log2stderr(0, "ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    elapsed_time = datetime.now() - start_time
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    return elapsed_time
def main(self):
    """Command-line entry point: parse options, validate the channel and
    sync every configured repository URL.

    NOTE(review): Python 2 code (print statement) - keep on Python 2.
    Exits with status 1 on missing --channel, unknown channel, or a
    channel without URLs.
    """
    initCFG('server')
    db_string = CFG.DEFAULT_DB #"rhnsat/rhnsat@rhnsat"
    rhnSQL.initDB(db_string)
    (options, args) = self.process_args()
    # Default log name; channel syncs get a timestamped per-channel log.
    log_filename = 'reposync.log'
    if options.channel_label:
        date = time.localtime()
        datestr = '%d.%02d.%02d-%02d:%02d:%02d' % (
            date.tm_year, date.tm_mon, date.tm_mday,
            date.tm_hour, date.tm_min, date.tm_sec)
        log_filename = options.channel_label + '-' + datestr + '.log'
    rhnLog.initLOG(default_log_location + log_filename)
    #os.fchown isn't in 2.4 :/
    os.system("chgrp apache " + default_log_location + log_filename)
    quit = False
    if not options.url:
        if options.channel_label:
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.source_url
                                    from rhnContentSource s,
                                         rhnChannelContentSource cs,
                                         rhnChannel c
                                   where s.id = cs.source_id
                                     and cs.channel_id = c.id
                                     and c.label = :label""")
            h.execute(label=options.channel_label)
            source_urls = h.fetchall_dict() or []
            if source_urls:
                self.urls = [row['source_url'] for row in source_urls]
            else:
                quit = True
                self.error_msg("Channel has no URL associated")
    else:
        self.urls = [options.url]
    if not options.channel_label:
        quit = True
        self.error_msg("--channel must be specified")
    self.log_msg("\nSync started: %s" % (time.asctime(time.localtime())))
    self.log_msg(str(sys.argv))
    if quit:
        sys.exit(1)
    self.type = options.type
    self.channel_label = options.channel_label
    self.fail = options.fail
    self.quiet = options.quiet
    self.channel = self.load_channel()
    if not self.channel or not rhnChannel.isCustomChannel(self.channel['id']):
        print "Channel does not exist or is not custom"
        sys.exit(1)
    for url in self.urls:
        plugin = self.load_plugin()(url, self.channel_label)
        self.import_packages(plugin, url)
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
    self.update_date()
    rhnSQL.commit()
    self.print_msg("Sync complete")