def _channelPackageSubscription(self, authobj, info):
    """Subscribe a batch of pushed packages to one or more channels.

    authobj -- authorization object; must authorize the org in *info*
               and every channel label in info['channels'].
    info    -- dict with 'packages' (list of NVREA dicts, optionally
               carrying 'md5sum' or 'checksum'/'checksum_type'),
               'channels' (list of channel labels) and optional 'orgId'.

    Returns 0 (also when there is nothing to do).  Raises
    rhnFault(50, ...) when the importer reports an incompatible arch
    or an invalid channel.
    """
    # Authorize the org id passed
    authobj.authzOrg(info)
    packageList = info.get('packages') or []
    if not packageList:
        log_debug(1, "No packages found; done")
        return 0
    if 'channels' not in info or not info['channels']:
        log_debug(1, "No channels found; done")
        return 0
    channelList = info['channels']
    authobj.authzChannels(channelList)
    # Have to turn the channel list into a list of Channel objects
    channelList = [Channel().populate({'label': x}) for x in channelList]
    # Since we're dealing with superusers, we allow them to change the org
    # id
    # XXX check if we don't open ourselves too much (misa 20030422)
    org_id = info.get('orgId')
    if org_id == '':
        org_id = None
    batch = Collection()
    package_keys = ['name', 'version', 'release', 'epoch', 'arch']
    for package in packageList:
        # Validate and normalize the NVREA keys; epoch may legitimately be
        # None / '' (treated as no epoch), everything else is stringified.
        for k in package_keys:
            if k not in package:
                raise Exception("Missing key %s" % k)
            if k == 'epoch':
                if package[k] is not None:
                    if package[k] == '':
                        package[k] = None
                    else:
                        package[k] = str(package[k])
            else:
                package[k] = str(package[k])
        if package['arch'] == 'src' or package['arch'] == 'nosrc':
            # Source package - no reason to continue
            continue
        _checksum_sql_filter = ""
        if 'md5sum' in package:  # for old rhnpush compatibility
            package['checksum_type'] = 'md5'
            package['checksum'] = package['md5sum']
        exec_args = {
            'name': package['name'],
            'pkg_epoch': package['epoch'],
            'pkg_version': package['version'],
            'pkg_rel': package['release'],
            'pkg_arch': package['arch'],
            'orgid': org_id
        }
        # Narrow the package lookup by checksum only when NVREA support is
        # enabled and the client actually sent a checksum.
        if 'checksum' in package and CFG.ENABLE_NVREA:
            _checksum_sql_filter = """and c.checksum = :checksum
                and c.checksum_type = :checksum_type"""
            exec_args.update({
                'checksum_type': package['checksum_type'],
                'checksum': package['checksum']
            })
        h = rhnSQL.prepare(self._get_pkg_info_query % _checksum_sql_filter)
        h.execute(**exec_args)
        row = h.fetchone_dict()
        # NOTE(review): row is not checked for None -- if the package is not
        # in the DB this raises TypeError instead of a clean fault; confirm
        # callers guarantee the package exists.
        package['checksum_type'] = row['checksum_type']
        package['checksum'] = row['checksum']
        package['org_id'] = org_id
        package['channels'] = channelList
        batch.append(IncompletePackage().populate(package))
    caller = "server.app.channelPackageSubscription"
    backend = SQLBackend()
    importer = ChannelPackageSubscription(batch, backend, caller=caller)
    try:
        importer.run()
    except IncompatibleArchError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, string.join(e.args), explain=0),
                      sys.exc_info()[2])
    except InvalidChannelError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, str(e), explain=0), sys.exc_info()[2])
    affected_channels = importer.affected_channels
    log_debug(3, "Computing errata cache for systems affected by channels",
              affected_channels)
    schedule_errata_cache_update(affected_channels)
    rhnSQL.commit()
    return 0
def _channelPackageSubscription(self, authobj, info):
    """Validate a pushed package list and subscribe it to channels.

    Duplicate variant of the same handler: authorizes the org and the
    requested channels, normalizes each package's NVREA fields, resolves
    checksum data from the DB, then runs ChannelPackageSubscription and
    schedules an errata-cache update for the affected channels.

    Returns 0; raises rhnFault(50, ...) on importer arch/channel errors.
    """
    # Authorize the org id passed
    authobj.authzOrg(info)
    packageList = info.get('packages') or []
    if not packageList:
        log_debug(1, "No packages found; done")
        return 0
    if 'channels' not in info or not info['channels']:
        log_debug(1, "No channels found; done")
        return 0
    channelList = info['channels']
    authobj.authzChannels(channelList)
    # Have to turn the channel list into a list of Channel objects
    channelList = [Channel().populate({'label': x}) for x in channelList]
    # Since we're dealing with superusers, we allow them to change the org
    # id
    # XXX check if we don't open ourselves too much (misa 20030422)
    org_id = info.get('orgId')
    if org_id == '':
        org_id = None
    batch = Collection()
    package_keys = ['name', 'version', 'release', 'epoch', 'arch']
    for package in packageList:
        # Every NVREA key must be present; epoch '' / None means "no
        # epoch", all other values are coerced to str for the SQL binds.
        for k in package_keys:
            if k not in package:
                raise Exception("Missing key %s" % k)
            if k == 'epoch':
                if package[k] is not None:
                    if package[k] == '':
                        package[k] = None
                    else:
                        package[k] = str(package[k])
            else:
                package[k] = str(package[k])
        if package['arch'] == 'src' or package['arch'] == 'nosrc':
            # Source package - no reason to continue
            continue
        _checksum_sql_filter = ""
        if 'md5sum' in package:  # for old rhnpush compatibility
            package['checksum_type'] = 'md5'
            package['checksum'] = package['md5sum']
        exec_args = {
            'name': package['name'],
            'pkg_epoch': package['epoch'],
            'pkg_version': package['version'],
            'pkg_rel': package['release'],
            'pkg_arch': package['arch'],
            'orgid': org_id
        }
        # Add the checksum filter only when NVREA is enabled and a checksum
        # was supplied by the client.
        if 'checksum' in package and CFG.ENABLE_NVREA:
            _checksum_sql_filter = """and c.checksum = :checksum
                and c.checksum_type = :checksum_type"""
            exec_args.update({'checksum_type': package['checksum_type'],
                              'checksum': package['checksum']})
        h = rhnSQL.prepare(self._get_pkg_info_query % _checksum_sql_filter)
        h.execute(**exec_args)
        row = h.fetchone_dict()
        # NOTE(review): no None check on row -- a missing package raises
        # TypeError rather than an rhnFault; verify this is intended.
        package['checksum_type'] = row['checksum_type']
        package['checksum'] = row['checksum']
        package['org_id'] = org_id
        package['channels'] = channelList
        batch.append(IncompletePackage().populate(package))
    caller = "server.app.channelPackageSubscription"
    backend = SQLBackend()
    importer = ChannelPackageSubscription(batch, backend, caller=caller)
    try:
        importer.run()
    except IncompatibleArchError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, string.join(e.args), explain=0),
                      sys.exc_info()[2])
    except InvalidChannelError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, str(e), explain=0), sys.exc_info()[2])
    affected_channels = importer.affected_channels
    log_debug(3, "Computing errata cache for systems affected by channels",
              affected_channels)
    schedule_errata_cache_update(affected_channels)
    rhnSQL.commit()
    return 0
def import_packages(self, plug, source_id, url):
    """Download and import one repository's packages into self.channel.

    plug      -- repository plugin providing list_packages() and the
                 download helpers used below.
    source_id -- DB id of the content source; when self.filters is empty,
                 per-source filters are loaded from rhnContentSourceFilter.
    url       -- repo URL; only used to decide whether downloaded files
                 are temporary (non-"file:/" repos get cleaned up).

    Returns the number of packages that failed to download/import.
    """
    failed_packages = 0
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters
    packages = plug.list_packages(filters, self.latest)
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Decide per package whether it must be downloaded and/or linked to
    # the channel, based on what the DB already knows about it.
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.org_id)
        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""
            if self.metadata_only or self.match_package_checksum(
                    db_pack['path'], pack.path, pack.checksum_type,
                    pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
                # just pass data from DB, they will be used in strict channel
                # linking if there is no new RPM downloaded
                pack.checksum = db_pack['checksum']
                pack.checksum_type = db_pack['checksum_type']
                pack.epoch = db_pack['epoch']
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)
        if to_download or to_link:
            to_process.append((pack, to_download, to_link))
    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return failed_packages
    else:
        log(0, "Packages already synced: %5d" % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)
    is_non_local_repo = (url.find("file:/") < 0)
    # Queue all required downloads on the threaded downloader first.
    downloader = ThreadedDownloader()
    to_download_count = 0
    for what in to_process:
        pack, to_download, to_link = what
        if to_download:
            target_file = os.path.join(
                plug.repo.pkgdir,
                os.path.basename(pack.unique_id.relativepath))
            pack.path = target_file
            params = {}
            if self.metadata_only:
                # Only the RPM header is needed: fetch the leading byte
                # range and skip checksum verification of the payload.
                bytes_range = (0, pack.unique_id.hdrend)
                checksum_type = None
                checksum = None
            else:
                bytes_range = None
                checksum_type = pack.checksum_type
                checksum = pack.checksum
            plug.set_download_parameters(params, pack.unique_id.relativepath,
                                         target_file,
                                         checksum_type=checksum_type,
                                         checksum_value=checksum,
                                         bytes_range=bytes_range)
            downloader.add(params)
            to_download_count += 1
    if num_to_process != 0:
        log(0, "New packages to download: %5d" % to_download_count)
    logger = TextLogger(None, to_download_count)
    downloader.set_log_obj(logger)
    downloader.run()
    log2disk(0, "Importing packages started.")
    progress_bar = ProgressBarLogger("Importing packages: ",
                                     to_download_count)
    # Import each downloaded file; failures are counted and the package is
    # dropped from processing/linking instead of aborting the whole sync
    # (unless self.fail is set).
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        if not to_download:
            continue
        localpath = pack.path
        # pylint: disable=W0703
        try:
            if os.path.exists(localpath):
                pack.load_checksum_from_header()
                rel_package_path = pack.upload_package(
                    self.org_id, metadata_only=self.metadata_only)
                # Save uploaded package to cache with repository checksum type
                if rel_package_path:
                    self.checksum_cache[rel_package_path] = {
                        pack.checksum_type: pack.checksum
                    }
                # we do not want to keep a whole 'a_pkg' object for every
                # package in memory, because we need only checksum.
                # see BZ 1397417
                pack.checksum = pack.a_pkg.checksum
                pack.checksum_type = pack.a_pkg.checksum_type
                pack.epoch = pack.a_pkg.header['epoch']
                pack.a_pkg = None
            else:
                # Download did not produce the file -- treat as a failure.
                raise Exception
            progress_bar.log(True, None)
        except KeyboardInterrupt:
            raise
        except Exception:
            failed_packages += 1
            e = str(sys.exc_info()[1])
            if e:
                log2(0, 1, e, stream=sys.stderr)
            if self.fail:
                raise
            to_process[index] = (pack, False, False)
            self.all_packages.remove(pack)
            progress_bar.log(False, None)
        finally:
            # Remote repos: the downloaded copy is temporary, remove it.
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)
    log2disk(0, "Importing packages finished.")
    if self.strict:
        # Need to make sure all packages from all repositories are
        # associated with channel
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        # Only packages from current repository are appended to channel
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    # Do not re-link if nothing was marked to link
    if any([to_link for (pack, to_download, to_link) in to_process]):
        log(0, "Linking packages to channel.")
        backend = SQLBackend()
        caller = "server.app.yumreposync"
        importer = ChannelPackageSubscription(import_batch, backend,
                                              caller=caller, repogen=False,
                                              strict=self.strict)
        importer.run()
        backend.commit()
        self.regen = True
    return failed_packages
def import_packages(self, plug, source_id, url):
    """Sequentially download and import a repository into self.channel.

    Older variant: packages are fetched one at a time via
    plug.get_package() and uploaded immediately; non-local downloads are
    removed afterwards through the finally_remove() helper (a workaround
    for python 2.4's lack of try/except/finally).
    """
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters
    packages = plug.list_packages(filters, self.latest)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Normalize org_id to int or None before the DB lookups below.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])
        to_download = True
        to_link = True
        # NOTE(review): db_pack is indexed without a None check; assumes
        # get_info_for_package always returns a dict here -- TODO confirm.
        if db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path, pack.checksum_type,
                                           pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)
        if to_download or to_link:
            to_process.append((pack, to_download, to_link))
    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)
    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    def finally_remove(path):
        # Remove a temporary download (only for remote repositories).
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)
    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s" % (index + 1, num_to_process,
                                           pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
            if to_download:
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            # Failed package is neither downloaded nor linked.
            to_process[index] = (pack, False, False)
            continue
    self.print_msg("Linking packages to channel.")
    import_batch = [self.associate_package(pack)
                    for (pack, to_download, to_link) in to_process
                    if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False)
    importer.run()
    backend.commit()
raise except Exception, e: self.error_msg(e) finally_remove(localpath) if self.fail: raise to_process[index] = (pack, False, False) continue self.print_msg("Linking packages to channel.") import_batch = [self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link] backend = SQLBackend() caller = "server.app.yumreposync" importer = ChannelPackageSubscription(import_batch, backend, caller=caller, repogen=False) importer.run() backend.commit() @staticmethod def match_package_checksum(abspath, checksum_type, checksum): if (os.path.exists(abspath) and getFileChecksum(checksum_type, filename=abspath) == checksum): return 1 return 0 def associate_package(self, pack): package = {} package['name'] = pack.name package['version'] = pack.version package['release'] = pack.release
def import_packages(self, plug, source_id, url):
    """Download and import a repository into self.channel (strict-aware).

    Variant supporting self.strict (re-link every known package) and
    self.metadata_only (header-only fetch/upload).  Packages are fetched
    and uploaded one at a time; temporary downloads from non-"file:/"
    repos are removed in the finally clause.
    """
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters
    packages = plug.list_packages(filters, self.latest)
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Normalize org_id to int or None before the DB lookups below.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])
        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""
            if self.metadata_only or self.match_package_checksum(
                    pack.path, pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)
            # just pass data from DB, they will be used if there is no RPM
            # available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']
        if to_download or to_link:
            to_process.append((pack, to_download, to_link))
    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)
    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" % (index + 1, num_to_process,
                                   pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(
                    pack, metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            # Drop the failed package from further processing/linking.
            to_process[index] = (pack, False, False)
            continue
        finally:
            # Remote repos: remove the temporary download.
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)
    log(0, "Linking packages to channel.")
    if self.strict:
        # Strict mode: associate every package seen across repositories.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
self.error_msg(e) finally_remove(localpath) if self.fail: raise to_process[index] = (pack, False, False) continue self.print_msg("Linking packages to channel.") import_batch = [ self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link ] backend = SQLBackend() caller = "server.app.yumreposync" importer = ChannelPackageSubscription(import_batch, backend, caller=caller, repogen=False) importer.run() backend.commit() @staticmethod def match_package_checksum(abspath, checksum_type, checksum): if (os.path.exists(abspath) and getFileChecksum( checksum_type, filename=abspath) == checksum): return 1 return 0 def associate_package(self, pack): package = {} package['name'] = pack.name package['version'] = pack.version
def import_packages(self, plug, source_id, url):
    """Download and import a repository into self.channel.

    Same strict/metadata_only-aware flow as the sibling variants, but
    self.channel['org_id'] is passed to the DB lookup as-is (no int/None
    normalization here).
    """
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters
    packages = plug.list_packages(filters, self.latest)
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])
        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""
            if self.metadata_only or self.match_package_checksum(
                    pack.path, pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)
            # just pass data from DB, they will be used if there is no RPM
            # available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']
        if to_download or to_link:
            to_process.append((pack, to_download, to_link))
    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)
    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" % (index + 1, num_to_process,
                                   pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(
                    pack, metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            # Drop the failed package from further processing/linking.
            to_process[index] = (pack, False, False)
            continue
        finally:
            # Remote repos: remove the temporary download.
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)
    log(0, "Linking packages to channel.")
    if self.strict:
        # Strict mode: associate every package seen across repositories.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def _channelPackageSubscription(self, authobj, info):
    """Subscribe pushed packages to channels (legacy Python 2 variant).

    Uses has_key()/`except E, e` syntax and selects an Oracle or
    PostgreSQL backend from CFG.DB_BACKEND.  Returns 0 implicitly only
    through the early exits; raises rhnFault(50, ...) when the importer
    reports an incompatible arch.
    """
    # Authorize the org id passed
    authobj.authzOrg(info)
    packageList = info.get('packages') or []
    if not packageList:
        log_debug(1, "No packages found; done")
        return 0
    if not info.has_key('channels') or not info['channels']:
        log_debug(1, "No channels found; done")
        return 0
    channelList = info['channels']
    authobj.authzChannels(channelList)
    # Have to turn the channel list into a list of Channel objects
    channelList = map(lambda x: Channel().populate({'label' : x}),
                      channelList)
    # Since we're dealing with superusers, we allow them to change the org
    # id
    # XXX check if we don't open ourselves too much (misa 20030422)
    org_id = info.get('orgId')
    if org_id == '':
        org_id = None
    batch = Collection()
    package_keys = ['name', 'version', 'release', 'epoch', 'arch']
    for package in packageList:
        for k in package_keys:
            if not package.has_key(k):
                raise Exception("Missing key %s" % k)
        if package['arch'] == 'src' or package['arch'] == 'nosrc':
            # Source package - no reason to continue
            continue
        _checksum_sql_filter = ""
        checksum_exists = 0
        if 'md5sum' in package:  # for old rhnpush compatibility
            package['checksum_type'] = 'md5'
            package['checksum'] = package['md5sum']
        if package.has_key('checksum') and CFG.ENABLE_NVREA:
            checksum_exists = 1
            _checksum_sql_filter = """and c.checksum = :checksum
                and c.checksum_type = :checksum_type"""
        h = rhnSQL.prepare(self._get_pkg_info_query % \
            _checksum_sql_filter)
        # Empty epoch is bound as NULL.
        pkg_epoch = None
        if package['epoch'] != '':
            pkg_epoch = package['epoch']
        if checksum_exists:
            h.execute(pkg_name=package['name'], \
                pkg_epoch=pkg_epoch, \
                pkg_version=package['version'], \
                pkg_rel=package['release'],pkg_arch=package['arch'], \
                orgid = org_id, \
                checksum_type = package['checksum_type'], \
                checksum = package['checksum'])
        else:
            h.execute(pkg_name=package['name'], \
                pkg_epoch=pkg_epoch, \
                pkg_version=package['version'], \
                pkg_rel=package['release'], \
                pkg_arch=package['arch'], orgid = org_id )
        row = h.fetchone_dict()
        # NOTE(review): row is not checked for None -- a package missing
        # from the DB raises TypeError here; confirm callers guarantee it.
        package['checksum_type'] = row['checksum_type']
        package['checksum'] = row['checksum']
        package['org_id'] = org_id
        package['channels'] = channelList
        batch.append(IncompletePackage().populate(package))
    caller = "server.app.channelPackageSubscription"
    # NOTE(review): if CFG.DB_BACKEND is neither ORACLE nor POSTGRESQL,
    # `backend` is never bound and backend.init() raises NameError.
    if CFG.DB_BACKEND == ORACLE:
        from spacewalk.server.importlib.backendOracle import OracleBackend
        backend = OracleBackend()
    elif CFG.DB_BACKEND == POSTGRESQL:
        from spacewalk.server.importlib.backendOracle import PostgresqlBackend
        backend = PostgresqlBackend()
    backend.init()
    importer = ChannelPackageSubscription(batch, backend, caller=caller)
    try:
        importer.run()
    except IncompatibleArchError, e:
        raise rhnFault(50, string.join(e.args), explain=0)
def import_packages(self, plug, source_id, url):
    """Sequentially download and import a repository into self.channel.

    Duplicate of the python-2.4-compatible variant: one-at-a-time
    download/upload with finally_remove() cleanup of temporary files
    from non-"file:/" repositories.
    """
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters
    packages = plug.list_packages(filters, self.latest)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Normalize org_id to int or None before the DB lookups below.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])
        to_download = True
        to_link = True
        # NOTE(review): db_pack is indexed without a None check; assumes
        # get_info_for_package always returns a dict here -- TODO confirm.
        if db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path, pack.checksum_type,
                                           pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)
        if to_download or to_link:
            to_process.append((pack, to_download, to_link))
    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)
    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    def finally_remove(path):
        # Remove a temporary download (only for remote repositories).
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)
    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s" % (index + 1, num_to_process,
                                           pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
            if to_download:
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            # Failed package is neither downloaded nor linked.
            to_process[index] = (pack, False, False)
            continue
    self.print_msg("Linking packages to channel.")
    import_batch = [self.associate_package(pack)
                    for (pack, to_download, to_link) in to_process
                    if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False)
    importer.run()
    backend.commit()
def _importer_run(self, package, caller, backend):
    """Run a one-package channel subscription import.

    Wraps *package* in an IncompletePackage batch of one and feeds it to
    ChannelPackageSubscription with repository generation disabled.
    """
    batch = [IncompletePackage().populate(package)]
    ChannelPackageSubscription(batch, backend,
                               caller=caller,
                               repogen=False).run()