def import_packages(self, plug, source_id, url):
    """Download packages from a repo plugin and link them to self.channel.

    :param plug: repository plugin providing list_packages()/get_package()
    :param source_id: id of the content source row; used to load filter
                      rules from rhnContentSourceFilter when no explicit
                      filters were configured
    :param url: repository URL; downloaded files are kept on disk for
                local file:/ repositories, removed otherwise
    """
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)

    channel_id = int(self.channel['id'])
    # Normalize org_id once: int when set, None otherwise.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # BUGFIX: guard against an empty lookup result before indexing it
        # (later revisions of this function add the same `if db_pack` check).
        if db_pack and db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path,
                                           pack.checksum_type, pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    def finally_remove(path):
        # Delete the downloaded file again, but only for remote repos.
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)

    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s" %
                           (index + 1, num_to_process, pack.getNVREA()))
            # Single guard: nothing between download and upload changes
            # to_download, and finally_remove(None) is a no-op anyway.
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            # Failed: neither download nor link this package.
            to_process[index] = (pack, False, False)
            continue

    self.print_msg("Linking packages to channel.")
    import_batch = [self.associate_package(pack)
                    for (pack, to_download, to_link) in to_process
                    if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def import_packages(self, plug, source_id, url):
    """Download packages via a threaded downloader and import them.

    :param plug: repository plugin (list_packages, repo.pkgdir,
                 set_download_parameters)
    :param source_id: content source id used to load DB filter rules
    :param url: repository URL; files from remote (non file:/) repos are
                removed after import
    :return: number of packages that failed to import
    """
    failed_packages = 0
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # Remember every package seen across repos for strict linking below.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.org_id)

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(
                    db_pack['path'], pack.path,
                    pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False

                # just pass data from DB, they will be used in strict channel
                # linking if there is no new RPM downloaded
                pack.checksum = db_pack['checksum']
                pack.checksum_type = db_pack['checksum_type']
                pack.epoch = db_pack['epoch']

            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return failed_packages
    else:
        log(0, "Packages already synced: %5d" %
            (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    is_non_local_repo = (url.find("file:/") < 0)

    # Queue all downloads first, then run them through the threaded pool.
    downloader = ThreadedDownloader()
    to_download_count = 0
    for what in to_process:
        pack, to_download, to_link = what
        if to_download:
            target_file = os.path.join(
                plug.repo.pkgdir,
                os.path.basename(pack.unique_id.relativepath))
            pack.path = target_file
            params = {}
            if self.metadata_only:
                # Header-only fetch: just the first hdrend bytes, so a
                # full-file checksum cannot be verified.
                bytes_range = (0, pack.unique_id.hdrend)
                checksum_type = None
                checksum = None
            else:
                bytes_range = None
                checksum_type = pack.checksum_type
                checksum = pack.checksum
            plug.set_download_parameters(params,
                                         pack.unique_id.relativepath,
                                         target_file,
                                         checksum_type=checksum_type,
                                         checksum_value=checksum,
                                         bytes_range=bytes_range)
            downloader.add(params)
            to_download_count += 1
    if num_to_process != 0:
        log(0, "New packages to download: %5d" % to_download_count)
    logger = TextLogger(None, to_download_count)
    downloader.set_log_obj(logger)
    downloader.run()

    log2disk(0, "Importing packages started.")
    progress_bar = ProgressBarLogger("Importing packages: ",
                                     to_download_count)
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        if not to_download:
            continue
        localpath = pack.path
        # pylint: disable=W0703
        try:
            if os.path.exists(localpath):
                pack.load_checksum_from_header()
                rel_package_path = pack.upload_package(
                    self.org_id, metadata_only=self.metadata_only)
                # Save uploaded package to cache with repository checksum type
                if rel_package_path:
                    self.checksum_cache[rel_package_path] = {
                        pack.checksum_type: pack.checksum}
                # we do not want to keep a whole 'a_pkg' object for every
                # package in memory, because we need only checksum.
                # see BZ 1397417
                pack.checksum = pack.a_pkg.checksum
                pack.checksum_type = pack.a_pkg.checksum_type
                pack.epoch = pack.a_pkg.header['epoch']
                pack.a_pkg = None
            else:
                # BUGFIX: a bare `raise Exception` produced an empty
                # str(exc), so the handler below logged nothing and the
                # real cause (download never landed) was hidden.
                raise Exception("Package file not found: %s" % localpath)
            progress_bar.log(True, None)
        except KeyboardInterrupt:
            raise
        except Exception:
            failed_packages += 1
            e = str(sys.exc_info()[1])
            if e:
                log2(0, 1, e, stream=sys.stderr)
            if self.fail:
                raise
            # Drop the failed pack from linking candidates.
            to_process[index] = (pack, False, False)
            self.all_packages.remove(pack)
            progress_bar.log(False, None)
        finally:
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)
    log2disk(0, "Importing packages finished.")

    if self.strict:
        # Need to make sure all packages from all repositories are
        # associated with channel
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        # Only packages from current repository are appended to channel
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    # Do not re-link if nothing was marked to link
    # (generator instead of a throwaway list for any())
    if any(to_link for (pack, to_download, to_link) in to_process):
        log(0, "Linking packages to channel.")
        backend = SQLBackend()
        caller = "server.app.yumreposync"
        importer = ChannelPackageSubscription(import_batch, backend,
                                              caller=caller, repogen=False,
                                              strict=self.strict)
        importer.run()
        backend.commit()
        self.regen = True
    return failed_packages
def import_packages(self, plug, source_id, url):
    """Download packages one by one and import them into self.channel.

    :param plug: repository plugin providing list_packages()/get_package()
    :param source_id: content source id used to load DB filter rules
    :param url: repository URL; files from remote (non file:/) repos are
                removed after import
    """
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # Remember every package seen across repos for strict linking below.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)

    channel_id = int(self.channel['id'])
    # Normalize org_id once: int when set, None otherwise.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(
                    pack.path, pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

            # just pass data from DB, they will be used if there is no RPM available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" %
            (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" %
                (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(
                    pack, metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            to_process[index] = (pack, False, False)
            # BUGFIX: also drop the failed pack from all_packages --
            # otherwise strict linking below would associate a package
            # that was never uploaded (fixed the same way in the later
            # revision of this function).
            self.all_packages.remove(pack)
            continue
        finally:
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)

    log(0, "Linking packages to channel.")
    if self.strict:
        # Associate everything seen across all repositories.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        # Only packages from the current repository are appended.
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def import_packages(self, plug, source_id, url):
    """Download packages from a repo plugin and upload them to the channel.

    NOTE(review): this revision ends after the download/upload loop; the
    channel-linking step is performed elsewhere in this revision of the
    file (not visible here).

    :param plug: repository plugin providing list_packages()/get_package()
    :param source_id: content source id used to load DB filter rules
    :param url: repository URL; files from remote (non file://) repos are
                removed after upload
    """
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters)
    if self.latest:
        # Keep only the newest version of each package.
        packages = latest_packages(packages)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)

    channel_id = int(self.channel['id'])
    # Normalize org_id once: int when set, None otherwise.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # BUGFIX: guard against an empty lookup result before indexing it
        # (later revisions of this function add the same `if db_pack` check).
        if db_pack and db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path,
                                           pack.checksum_type, pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = (url.find("file://") < 0)

    def finally_remove(path):
        # Delete the downloaded file again, but only for remote repos.
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)

    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s" %
                           (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            # BUGFIX: `except Exception, e` is Python-2-only syntax; use the
            # sys.exc_info() form already used by sibling revisions (works
            # on both Python 2 and 3, incl. the 2.4 target noted above).
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            to_process[index] = (pack, False, False)
            continue
def import_packages(self, plug, source_id, url):
    """Download packages one by one and import them into self.channel.

    NOTE(review): unlike the sibling revision, this one passes
    self.channel['org_id'] to get_info_for_package without int
    normalization -- presumably normalized by the caller; verify.

    :param plug: repository plugin providing list_packages()/get_package()
    :param source_id: content source id used to load DB filter rules
    :param url: repository URL; files from remote (non file:/) repos are
                removed after import
    """
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # Remember every package seen across repos for strict linking below.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(
                    pack.path, pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

            # just pass data from DB, they will be used if there is no RPM available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" %
            (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" %
                (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(
                    pack, metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            to_process[index] = (pack, False, False)
            # BUGFIX: also drop the failed pack from all_packages --
            # otherwise strict linking below would associate a package
            # that was never uploaded (fixed the same way in the later
            # revision of this function).
            self.all_packages.remove(pack)
            continue
        finally:
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)

    log(0, "Linking packages to channel.")
    if self.strict:
        # Associate everything seen across all repositories.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        # Only packages from the current repository are appended.
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch, backend,
                                          caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def import_packages(self, plug, source_id, url):
    """Download packages from a repo plugin and upload them to the channel.

    NOTE(review): this revision ends after the download/upload loop; the
    channel-linking step is performed elsewhere in this revision of the
    file (not visible here).

    :param plug: repository plugin providing list_packages()/get_package()
    :param source_id: content source id used to load DB filter rules
    :param url: repository URL; files from remote (non file://) repos are
                removed after upload
    """
    if (not self.filters) and source_id:
        # No explicit filters configured -- load them from the DB.
        h = rhnSQL.prepare(
            """
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order """
        )
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        # BUGFIX: the pattern must be a raw string -- "[,\s]+" contains the
        # invalid escape \s (DeprecationWarning on py3.6+, error later);
        # sibling revisions already use the raw form.
        filters = [(row["flag"], re.split(r"[,\s]+", row["filter"]))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)

    channel_id = int(self.channel["id"])
    # Normalize org_id once: int when set, None otherwise.
    if self.channel["org_id"]:
        self.channel["org_id"] = int(self.channel["org_id"])
    else:
        self.channel["org_id"] = None

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel["org_id"]
        )

        to_download = True
        to_link = True
        # BUGFIX: guard against an empty lookup result before indexing it
        # (later revisions of this function add the same `if db_pack` check).
        if db_pack and db_pack["path"]:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack["path"])
            if self.match_package_checksum(pack.path,
                                           pack.checksum_type, pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack["channel_id"] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack["channel_id"] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = url.find("file://") < 0

    def finally_remove(path):
        # Delete the downloaded file again, but only for remote repos.
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)

    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        try:
            self.print_msg("%d/%d : %s" %
                           (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            # BUGFIX: `except Exception, e` is Python-2-only syntax; use the
            # sys.exc_info() form already used by sibling revisions (works
            # on both Python 2 and 3, incl. the 2.4 target noted above).
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            to_process[index] = (pack, False, False)
            continue