def _update_families_ssl(self):
    """Link channel families with certificates inserted in _update_certificates method

    Resolves DB ids for every family selected for import, then for each
    manifest entitlement replaces the rhnContentSsl rows of the families its
    products map to, pointing them at the entitlement's client cert/key and
    the shared CA cert. Commits at the end.
    """
    # Keys are family labels; values start as None and are filled in
    # with DB ids by lookupChannelFamilies below.
    family_ids = {}
    for family in self.families_to_import:
        family_ids[family] = None

    # Populate with IDs
    backend = SQLBackend()
    backend.lookupChannelFamilies(family_ids)

    # Lookup CA cert
    ca_cert = satCerts.lookup_cert(constants.CA_CERT_NAME, None)
    ca_cert_id = int(ca_cert['id'])

    # Queries for updating relation between channel families and certificates
    hdel = rhnSQL.prepare("""
        delete from rhnContentSsl
        where channel_family_id = :cfid
    """)
    hins = rhnSQL.prepare("""
        insert into rhnContentSsl
        (channel_family_id, ssl_ca_cert_id, ssl_client_cert_id, ssl_client_key_id)
        values (:cfid, :ca_cert_id, :client_cert_id, :client_key_id)
    """)

    for entitlement in self.manifest.get_all_entitlements():
        # Each entitlement has its own client certificate/key pair, stored
        # under names prefixed with the credential id.
        creds = entitlement.get_credentials()
        client_cert = satCerts.lookup_cert(
            constants.CLIENT_CERT_PREFIX + creds.get_id(), None)
        client_key = satCerts.lookup_cert(
            constants.CLIENT_KEY_PREFIX + creds.get_id(), None)
        client_cert_id = int(client_cert['id'])
        client_key_id = int(client_key['id'])
        family_ids_to_link = []
        for product_id in entitlement.get_product_ids():
            try:
                product = self.products[product_id]
                for family in product['families']:
                    # Only link families that were selected for import above
                    if family in family_ids:
                        family_ids_to_link.append(family_ids[family])
            except KeyError:
                # Product id missing from the mapping
                print("Cannot map product '%s' into channel families"
                      % product_id)

        # De-duplicate: several products may map onto the same family
        family_ids_to_link = set(family_ids_to_link)

        for cfid in family_ids_to_link:
            # Replace any pre-existing SSL linkage for this family
            hdel.execute(cfid=cfid)
            hins.execute(cfid=cfid, ca_cert_id=ca_cert_id,
                         client_cert_id=client_cert_id,
                         client_key_id=client_key_id)

    rhnSQL.commit()
def _update_repositories(self): """Setup SSL credential to access repositories We do this in 2 steps: 1. Fetching provided repositories from manifest - URL contains variables to substitute 2. Assigning one certificate/key set to each repository""" # First delete all repositories from previously used manifests self._remove_repositories() backend = SQLBackend() type_id = backend.lookupContentSourceType('yum') # Lookup CA cert ca_cert = satCerts.lookup_cert(constants.CA_CERT_NAME, None) ca_cert_id = int(ca_cert['id']) content_sources_batch = {} for entitlement in self.manifest.get_all_entitlements(): # Lookup SSL certificates and keys creds = entitlement.get_credentials() client_cert = satCerts.lookup_cert( constants.CLIENT_CERT_PREFIX + creds.get_id(), None) client_key = satCerts.lookup_cert( constants.CLIENT_KEY_PREFIX + creds.get_id(), None) client_cert_id = int(client_cert['id']) client_key_id = int(client_key['id']) content_source_ssl = ContentSourceSsl() content_source_ssl['ssl_ca_cert_id'] = ca_cert_id content_source_ssl['ssl_client_cert_id'] = client_cert_id content_source_ssl['ssl_client_key_id'] = client_key_id # Loop provided products for product in entitlement.get_products(): repositories = product.get_repositories() for repository in repositories: if repository not in content_sources_batch: content_source = ContentSource() content_source[ 'label'] = constants.MANIFEST_REPOSITORY_DB_PREFIX + repository content_source['source_url'] = repositories[repository] content_source['org_id'] = None content_source['type_id'] = type_id content_source['ssl-sets'] = [content_source_ssl] content_sources_batch[repository] = content_source # There may be more SSL certs to one repository, append it elif content_source_ssl not in content_sources_batch[ repository]['ssl-sets']: content_sources_batch[repository]['ssl-sets'].append( content_source_ssl) importer = ContentSourcesImport(list(content_sources_batch.values()), backend) importer.run()
def _update_families_ssl(self):
    """Link channel families with certificates inserted in _update_certificates method"""
    # Map each family label chosen for import to its DB id (filled in below).
    family_id_map = dict.fromkeys(self.families_to_import)
    backend = SQLBackend()
    backend.lookupChannelFamilies(family_id_map)

    # Resolve the id of the shared CA certificate.
    ca_cert_row = satCerts.lookup_cert(constants.CA_CERT_NAME, None)
    ca_cert_id = int(ca_cert_row['id'])

    # Prepared statements maintaining the family <-> certificate relation.
    delete_stmt = rhnSQL.prepare("""
        delete from rhnContentSsl
        where channel_family_id = :cfid
    """)
    insert_stmt = rhnSQL.prepare("""
        insert into rhnContentSsl
        (channel_family_id, ssl_ca_cert_id, ssl_client_cert_id, ssl_client_key_id)
        values (:cfid, :ca_cert_id, :client_cert_id, :client_key_id)
    """)

    for entitlement in self.manifest.get_all_entitlements():
        # Every entitlement carries its own client certificate/key pair.
        credentials = entitlement.get_credentials()
        cert_row = satCerts.lookup_cert(
            constants.CLIENT_CERT_PREFIX + credentials.get_id(), None)
        key_row = satCerts.lookup_cert(
            constants.CLIENT_KEY_PREFIX + credentials.get_id(), None)
        cert_id = int(cert_row['id'])
        key_id = int(key_row['id'])

        # Collect the distinct family ids this entitlement's products map to.
        linked_ids = set()
        for product_id in entitlement.get_product_ids():
            try:
                for family_label in self.products[product_id]['families']:
                    if family_label in family_id_map:
                        linked_ids.add(family_id_map[family_label])
            except KeyError:
                print("Cannot map product '%s' into channel families"
                      % product_id)

        # Replace any existing SSL linkage for each mapped family.
        for cfid in linked_ids:
            delete_stmt.execute(cfid=cfid)
            insert_stmt.execute(cfid=cfid, ca_cert_id=ca_cert_id,
                                client_cert_id=cert_id,
                                client_key_id=key_id)

    rhnSQL.commit()
def _update_repositories(self):
    """Setup SSL credential to access repositories
       We do this in 2 steps:
       1. Fetching provided repositories from manifest - URL contains variables to substitute
       2. Assigning one certificate/key set to each repository"""

    # First delete all repositories from previously used manifests
    self._remove_repositories()

    backend = SQLBackend()
    type_id = backend.lookupContentSourceType('yum')

    # Lookup CA cert
    ca_cert = satCerts.lookup_cert(constants.CA_CERT_NAME, None)
    ca_cert_id = int(ca_cert['id'])

    # Keyed by repo label so a repository referenced by several entitlements
    # is imported once, accumulating every SSL credential set that grants it.
    content_sources_batch = {}
    for entitlement in self.manifest.get_all_entitlements():
        # Lookup SSL certificates and keys
        creds = entitlement.get_credentials()
        client_cert = satCerts.lookup_cert(
            constants.CLIENT_CERT_PREFIX + creds.get_id(), None)
        client_key = satCerts.lookup_cert(
            constants.CLIENT_KEY_PREFIX + creds.get_id(), None)
        client_cert_id = int(client_cert['id'])
        client_key_id = int(client_key['id'])
        content_source_ssl = ContentSourceSsl()
        content_source_ssl['ssl_ca_cert_id'] = ca_cert_id
        content_source_ssl['ssl_client_cert_id'] = client_cert_id
        content_source_ssl['ssl_client_key_id'] = client_key_id
        # Loop provided products
        for product in entitlement.get_products():
            repositories = product.get_repositories()
            for repository in repositories:
                if repository not in content_sources_batch:
                    content_source = ContentSource()
                    content_source['label'] = constants.MANIFEST_REPOSITORY_DB_PREFIX + repository
                    content_source['source_url'] = repositories[repository]
                    # NULL org: satellite-wide sources, not org-owned
                    content_source['org_id'] = None
                    content_source['type_id'] = type_id
                    content_source['ssl-sets'] = [content_source_ssl]
                    content_sources_batch[repository] = content_source
                # There may be more SSL certs to one repository, append it
                elif content_source_ssl not in content_sources_batch[repository]['ssl-sets']:
                    content_sources_batch[repository]['ssl-sets'].append(content_source_ssl)

    # FIX: dict.values() is a lazy view on Python 3; materialize it as a
    # list (as the other _update_repositories variant in this file already
    # does) so the importer receives a real, indexable/re-iterable sequence.
    importer = ContentSourcesImport(list(content_sources_batch.values()), backend)
    importer.run()
def import_channel_families(self):
    """Insert channel family data into DB."""
    # Debug
    print("Channel families in cert: %d" % len(self.sat5_cert.channel_families))  # pylint: disable=E1101

    batch = []
    for cert_family in self.sat5_cert.channel_families:  # pylint: disable=E1101
        label = cert_family.name
        try:
            # Copy the mapped family definition into an importable object.
            mapping_entry = self.families[label]
            family_object = ChannelFamily()
            for key in mapping_entry.keys():
                family_object[key] = mapping_entry[key]
            family_object['label'] = label
            batch.append(family_object)
            self.families_to_import.append(label)
        except KeyError:
            print("ERROR: Channel family '%s' was not found in mapping" % label)

    # Perform import
    backend = SQLBackend()
    importer = ChannelFamilyImport(batch, backend)
    importer.run()
def count_packages(self):
    """Download repodata for every available channel's repositories and cache
    the number of distinct packages per channel under CDN_REPODATA_ROOT.

    Shows a progress bar while repodata is fetched and logs the elapsed time.
    """
    start_time = int(time.time())

    backend = SQLBackend()
    base_channels = self._list_available_channels()

    # Flatten all content sources first so the progress bar has a total.
    repo_list = []
    for base_channel in sorted(base_channels):
        for channel in sorted(base_channels[base_channel] + [base_channel]):
            repo_list.extend(self._get_content_sources(channel, backend))

    repo_count = len(repo_list)
    log(0, "Number of repositories: %d" % repo_count)
    already_downloaded = 0
    print_progress_bar(already_downloaded, repo_count,
                       prefix='Downloading repodata:',
                       suffix='Complete', bar_length=50)

    for base_channel in sorted(base_channels):
        for channel in sorted(base_channels[base_channel] + [base_channel]):
            family_label = self.channel_to_family[channel]
            keys = self._get_family_keys(family_label)

            sources = self._get_content_sources(channel, backend)
            list_packages = []
            for source in sources:
                list_packages.extend(
                    self._count_packages_in_repo(source['source_url'], keys))
                already_downloaded += 1
                print_progress_bar(already_downloaded, repo_count,
                                   prefix='Downloading repodata:',
                                   suffix='Complete', bar_length=50)

            cdn_repodata_path = constants.CDN_REPODATA_ROOT + '/' + channel

            # create directory for repo data if it doesn't exist
            try:
                os.makedirs(cdn_repodata_path)
            except OSError:
                exc = sys.exc_info()[1]
                # Tolerate an already-existing directory; re-raise anything else
                if not (exc.errno == errno.EEXIST
                        and os.path.isdir(cdn_repodata_path)):
                    raise

            # FIX: use a context manager instead of manual open/try/finally
            # (the old `if f_out is not None` guard was dead code — open()
            # either succeeds or raises before the try block is entered).
            # A set() de-duplicates packages shared between sources.
            with open(cdn_repodata_path + '/' + "packages_num", 'w') as f_out:
                f_out.write(str(len(set(list_packages))))

    elapsed_time = int(time.time())
    log(0, "Elapsed time: %d seconds" % (elapsed_time - start_time))
def import_channel_families(self):
    """Insert channel family data into DB."""
    log(1, "Channel families in manifest: %d"
        % len(self.sat5_cert.channel_families))  # pylint: disable=E1101

    batch = []
    for cert_family in self.sat5_cert.channel_families:  # pylint: disable=E1101
        label = cert_family.name
        try:
            # Copy the mapped family definition into an importable object.
            mapping_entry = self.families[label]
            family_object = ChannelFamily()
            for key in mapping_entry.keys():
                family_object[key] = mapping_entry[key]
            family_object['label'] = label
            batch.append(family_object)
            self.families_to_import.append(label)
        except KeyError:
            # While channel mappings are not consistent with certificate generated on RHN...
            msg = ("WARNING: Channel family '%s' is provided by manifest but "
                   "was not found in cdn-sync mappings." % label)
            log2(0, 1, msg, stream=sys.stderr)

    log(1, "Channel families to import: %d" % len(batch))

    # Perform import
    backend = SQLBackend()
    importer = ChannelFamilyImport(batch, backend)
    importer.run()
def _update_channels_metadata(self, channels):
    """Build Channel import objects for the given channel labels and run
    the content-source and channel importers.

    :param channels: iterable of channel labels present in
                     self.channel_metadata
    """
    # First populate rhnProductName table
    self._update_product_names(channels)

    backend = SQLBackend()
    channels_batch = []
    content_sources_batch = []
    for label in channels:
        channel = self.channel_metadata[label]
        channel_object = Channel()
        # Copy all metadata fields verbatim, then override the ones below.
        for k in channel.keys():
            channel_object[k] = channel[k]

        family_object = ChannelFamily()
        family_object['label'] = self.channel_to_family[label]

        channel_object['families'] = [family_object]
        channel_object['label'] = label
        channel_object['basedir'] = '/'

        # Backend expects product_label named as product_name
        # To have correct value in rhnChannelProduct and reference
        # to rhnProductName in rhnChannel
        channel_object['product_name'] = channel['product_label']

        dists = []
        releases = []

        # Distribution/Release channel mapping available
        if label in self.channel_dist_mapping:
            dist_map = self.channel_dist_mapping[label]
            for item in dist_map:
                # An EUS release entry becomes a ReleaseChannelMap row;
                # everything else becomes a DistChannelMap row.
                if item['eus_release']:
                    r = ReleaseChannelMap()
                    r['product'] = item['os']
                    r['version'] = item['release']
                    r['release'] = item['eus_release']
                    r['channel_arch'] = item['channel_arch']
                    releases.append(r)
                else:
                    d = DistChannelMap()
                    for k in item:
                        d[k] = item[k]
                    dists.append(d)

        channel_object['dists'] = dists
        channel_object['release'] = releases

        # Content sources are imported both standalone (batch below) and
        # attached to the channel object itself.
        sources = self.cdn_repository_manager.get_content_sources_import_batch(
            label, backend)
        content_sources_batch.extend(sources)
        channel_object['content-sources'] = sources

        channels_batch.append(channel_object)

    # Sources must exist before the channels referencing them are imported.
    importer = ContentSourcesImport(content_sources_batch, backend)
    importer.run()

    importer = ChannelImport(channels_batch, backend)
    importer.run()
def assign_repositories_to_channel(self, channel_label, delete_repos=None, add_repos=None):
    """Adjust the set of CDN repositories linked to a channel and re-import
    the resulting content sources. Returns the number of changes made."""
    backend = SQLBackend()
    # Drop custom links first; CDN links are rebuilt from the computed set.
    self.unlink_all_repos(channel_label, custom_only=True)
    repos = self.list_associated_repos(channel_label)
    changed = 0

    for repo in (delete_repos or []):
        if repo in repos:
            repos.remove(repo)
            log(0, "Removing repository '%s' from channel." % repo)
            changed += 1
        else:
            log2(0, 0, "WARNING: Repository '%s' is not attached to channel." % repo,
                 stream=sys.stderr)

    for repo in (add_repos or []):
        if repo not in repos:
            repos.append(repo)
            log(0, "Attaching repository '%s' to channel." % repo)
            changed += 1
        else:
            log2(0, 0, "WARNING: Repository '%s' is already attached to channel." % repo,
                 stream=sys.stderr)

    # If there are any repositories intended to be attached to channel
    if repos:
        batch = self.get_content_sources_import_batch(
            channel_label, backend, repos=sorted(repos))
        for content_source in batch:
            content_source['channels'] = [channel_label]
        ContentSourcesImport(batch, backend).run()
    else:
        # Make sure everything is unlinked
        self.unlink_all_repos(channel_label)
    return changed
def _update_product_names(self, channels):
    """Import a ProductName row for every channel that defines both a
    product label and a product name."""
    backend = SQLBackend()
    batch = []
    for label in channels:
        metadata = self.channel_metadata[label]
        # Skip channels with an incomplete product definition.
        if not (metadata['product_label'] and metadata['product_name']):
            continue
        product_name = ProductName()
        product_name['label'] = metadata['product_label']
        product_name['name'] = metadata['product_name']
        batch.append(product_name)

    ProductNamesImport(batch, backend).run()
def uploadPackages(info, source=0, force=0, caller=None):
    """Import a batch of packages described by `info` and return a pair of
    formatted status lists: (already uploaded, newly imported)."""
    log_debug(4, source, force, caller)
    packageList = info.get("packages") or []
    if not packageList:
        raise Exception("Nothing to do")

    # An empty-string org id means "no org".
    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    # Source packages are never subscribed to channels.
    channelList = [] if source else (info.get("channels") or [])

    batch = Collection()
    for package in packageList:
        batch.append(__processPackage(package, org_id, channelList, source))

    backend = SQLBackend()
    importer = packageImporter(batch, backend, source, caller=caller)
    importer.setUploadForce(force)
    importer.run()
    if not source:
        importer.subscribeToChannels()

    # Split the result in two lists - already uploaded and new packages
    newpkgs = []
    uploaded = []
    for pkg in importer.status():
        target = uploaded if (pkg.ignored or pkg.diff) else newpkgs
        target.append(pkg)

    # Schedule an errata cache update only if we touched the channels
    if not source:
        # makes sense only for binary packages
        schedule_errata_cache_update(importer.affected_channels)
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            importer.affected_channels, batch, caller)

    rhnSQL.commit()

    return _formatStatus(uploaded), _formatStatus(newpkgs)
def _update_channel_families(self):
    """Insert channel family data into DB"""
    # Gather every family label reachable from the manifest's products.
    families_in_mapping = []
    for entitlement in self.manifest.get_all_entitlements():
        for product_id in entitlement.get_product_ids():
            try:
                families_in_mapping.extend(self.products[product_id]['families'])
            # Some product cannot be mapped into channel families
            except KeyError:
                print("Cannot map product '%s' into channel families"
                      % product_id)
    families_in_mapping = set(families_in_mapping)

    # Debug
    print("Channel families mapped from products: %d"
          % len(self.families_to_import))
    print("Channel families in cert: %d"
          % len(self.sat5_cert.channel_families))  # pylint: disable=E1101

    batch = []
    for cert_family in self.sat5_cert.channel_families:  # pylint: disable=E1101
        label = cert_family.name
        # Only import families the manifest's products actually reference.
        if label not in families_in_mapping:
            print("Skipping channel family from certificate, not in the mapping: %s"
                  % label)
            continue
        try:
            mapping_entry = self.families[label]
            family_object = ChannelFamily()
            for key in mapping_entry.keys():
                family_object[key] = mapping_entry[key]
            family_object['label'] = label
            batch.append(family_object)
            self.families_to_import.append(label)
        except KeyError:
            print("ERROR: Channel family '%s' was not found in mapping" % label)

    # Perform import
    backend = SQLBackend()
    importer = ChannelFamilyImport(batch, backend)
    importer.run()
def _channelPackageSubscription(self, authobj, info):
    """Subscribe the packages listed in `info` to the channels listed in
    `info`, after normalizing NVREA keys and resolving checksums from the DB.

    :param authobj: authorization object; must allow the org and channels
    :param info: dict with 'packages', 'channels' and optional 'orgId'
    :return: 0 on success (including the nothing-to-do cases)
    :raises rhnFault: code 50 on incompatible arch or invalid channel
    """
    # Authorize the org id passed
    authobj.authzOrg(info)

    packageList = info.get('packages') or []
    if not packageList:
        log_debug(1, "No packages found; done")
        return 0

    if 'channels' not in info or not info['channels']:
        log_debug(1, "No channels found; done")
        return 0

    channelList = info['channels']
    authobj.authzChannels(channelList)

    # Have to turn the channel list into a list of Channel objects
    channelList = [Channel().populate({'label': x}) for x in channelList]

    # Since we're dealing with superusers, we allow them to change the org
    # id
    # XXX check if we don't open ourselves too much (misa 20030422)
    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    batch = Collection()
    package_keys = ['name', 'version', 'release', 'epoch', 'arch']
    for package in packageList:
        for k in package_keys:
            if k not in package:
                raise Exception("Missing key %s" % k)
            if k == 'epoch':
                # Normalize epoch: '' -> None, anything else -> str; a None
                # epoch stays None.
                if package[k] is not None:
                    if package[k] == '':
                        package[k] = None
                    else:
                        package[k] = str(package[k])
            else:
                package[k] = str(package[k])

        if package['arch'] == 'src' or package['arch'] == 'nosrc':
            # Source package - no reason to continue
            continue
        _checksum_sql_filter = ""
        if 'md5sum' in package:  # for old rhnpush compatibility
            package['checksum_type'] = 'md5'
            package['checksum'] = package['md5sum']

        exec_args = {'name': package['name'],
                     'pkg_epoch': package['epoch'],
                     'pkg_version': package['version'],
                     'pkg_rel': package['release'],
                     'pkg_arch': package['arch'],
                     'orgid': org_id}

        if 'checksum' in package and CFG.ENABLE_NVREA:
            _checksum_sql_filter = """and c.checksum = :checksum
                and c.checksum_type = :checksum_type"""
            exec_args.update({'checksum_type': package['checksum_type'],
                              'checksum': package['checksum']})

        h = rhnSQL.prepare(self._get_pkg_info_query % _checksum_sql_filter)
        h.execute(**exec_args)
        # NOTE(review): assumes the package is always found — row is
        # dereferenced without a None check; verify against callers.
        row = h.fetchone_dict()
        package['checksum_type'] = row['checksum_type']
        package['checksum'] = row['checksum']
        package['org_id'] = org_id
        package['channels'] = channelList

        batch.append(IncompletePackage().populate(package))

    caller = "server.app.channelPackageSubscription"
    backend = SQLBackend()
    importer = ChannelPackageSubscription(batch, backend, caller=caller)
    try:
        importer.run()
    except IncompatibleArchError:
        e = sys.exc_info()[1]
        # FIX: `string.join` is Python 2 only (removed in Python 3);
        # ' '.join has the same default single-space separator semantics.
        raise_with_tb(rhnFault(50, ' '.join(e.args), explain=0),
                      sys.exc_info()[2])
    except InvalidChannelError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, str(e), explain=0), sys.exc_info()[2])

    affected_channels = importer.affected_channels

    log_debug(3, "Computing errata cache for systems affected by channels",
              affected_channels)

    schedule_errata_cache_update(affected_channels)
    rhnSQL.commit()

    return 0
def import_packages(self, plug, source_id, url):
    """Sync packages from one repository into self.channel: decide per
    package whether it must be downloaded and/or linked, fetch the new
    ones, then subscribe the linked set to the channel.

    :param plug: repository plugin (listing/downloading packages)
    :param source_id: DB id of the content source, used to load its filters
    :param url: repository URL; file:/ URLs are treated as local (files kept)
    """
    # Load per-source filters from the DB only when no global ones are set.
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
            from rhnContentSourceFilter
            where source_id = :source_id
            order by sort_order
        """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Normalize org_id to int or None once, up front.
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        # NOTE(review): assumes get_info_for_package always returns a
        # dict — db_pack is dereferenced without a None check (the newer
        # variant of this method guards with `if db_pack:`); confirm.
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        if db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path,
                                           pack.checksum_type, pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d"
                       % (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)

    self.regen = True
    is_non_local_repo = (url.find("file:/") < 0)

    # Remove a downloaded temp file, but never delete from a local repo.
    def finally_remove(path):
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)

    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s"
                           % (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
            if to_download:
                pack.upload_package(self.channel)
                finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            # Failed package: mark as neither downloadable nor linkable.
            to_process[index] = (pack, False, False)
            continue

    self.print_msg("Linking packages to channel.")
    import_batch = [self.associate_package(pack)
                    for (pack, to_download, to_link) in to_process
                    if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch,
                                          backend, caller=caller,
                                          repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def get_backend(self):
    """Return the shared backend, creating one lazily on first use."""
    cached = self.__backend
    if cached:
        return cached
    # Cache on the class so every instance shares one SQLBackend.
    Backend.__backend = SQLBackend()
    return Backend.__backend
def import_packages(self, plug, source_id, url):
    """Sync packages from one repository into self.channel using the
    threaded downloader, then (re)link packages to the channel.

    :param plug: repository plugin (listing/downloading packages)
    :param source_id: DB id of the content source, used to load its filters
    :param url: repository URL; file:/ URLs are treated as local (files kept)
    :return: number of packages that failed to import
    """
    failed_packages = 0
    # Load per-source filters from the DB only when no global ones are set.
    if (not self.filters) and source_id:
        h = rhnSQL.prepare("""
            select flag, filter
            from rhnContentSourceFilter
            where source_id = :source_id
            order by sort_order
        """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # all_packages accumulates across repositories for strict linking below.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])

    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.org_id)

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(
                    db_pack['path'], pack.path,
                    pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False

                # just pass data from DB, they will be used in strict channel
                # linking if there is no new RPM downloaded
                pack.checksum = db_pack['checksum']
                pack.checksum_type = db_pack['checksum_type']
                pack.epoch = db_pack['epoch']

            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return failed_packages
    else:
        log(0, "Packages already synced: %5d"
            % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    is_non_local_repo = (url.find("file:/") < 0)

    # Queue every package marked for download on the threaded downloader.
    downloader = ThreadedDownloader()
    to_download_count = 0
    for what in to_process:
        pack, to_download, to_link = what
        if to_download:
            target_file = os.path.join(
                plug.repo.pkgdir,
                os.path.basename(pack.unique_id.relativepath))
            pack.path = target_file
            params = {}
            if self.metadata_only:
                # Header only: fetch just the leading byte range, no checksum
                bytes_range = (0, pack.unique_id.hdrend)
                checksum_type = None
                checksum = None
            else:
                bytes_range = None
                checksum_type = pack.checksum_type
                checksum = pack.checksum
            plug.set_download_parameters(params,
                                         pack.unique_id.relativepath,
                                         target_file,
                                         checksum_type=checksum_type,
                                         checksum_value=checksum,
                                         bytes_range=bytes_range)
            downloader.add(params)
            to_download_count += 1
    if num_to_process != 0:
        log(0, "New packages to download: %5d" % to_download_count)
    logger = TextLogger(None, to_download_count)
    downloader.set_log_obj(logger)
    downloader.run()

    log2disk(0, "Importing packages started.")
    progress_bar = ProgressBarLogger("Importing packages: ",
                                     to_download_count)
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        if not to_download:
            continue
        localpath = pack.path
        # pylint: disable=W0703
        try:
            if os.path.exists(localpath):
                pack.load_checksum_from_header()
                rel_package_path = pack.upload_package(
                    self.org_id, metadata_only=self.metadata_only)
                # Save uploaded package to cache with repository checksum type
                if rel_package_path:
                    self.checksum_cache[rel_package_path] = {
                        pack.checksum_type: pack.checksum}
                # we do not want to keep a whole 'a_pkg' object for every
                # package in memory, because we need only checksum.
                # see BZ 1397417
                pack.checksum = pack.a_pkg.checksum
                pack.checksum_type = pack.a_pkg.checksum_type
                pack.epoch = pack.a_pkg.header['epoch']
                pack.a_pkg = None
            else:
                # Download did not produce the file; treat as a failure
                raise Exception
            progress_bar.log(True, None)
        except KeyboardInterrupt:
            raise
        except Exception:
            failed_packages += 1
            e = str(sys.exc_info()[1])
            if e:
                log2(0, 1, e, stream=sys.stderr)
            if self.fail:
                raise
            # Failed package: exclude from both linking paths below.
            to_process[index] = (pack, False, False)
            self.all_packages.remove(pack)
            progress_bar.log(False, None)
        finally:
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)
    log2disk(0, "Importing packages finished.")

    if self.strict:
        # Need to make sure all packages from all repositories are
        # associated with channel
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        # Only packages from current repository are appended to channel
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    # Do not re-link if nothing was marked to link
    if any([to_link for (pack, to_download, to_link) in to_process]):
        log(0, "Linking packages to channel.")
        backend = SQLBackend()
        caller = "server.app.yumreposync"
        importer = ChannelPackageSubscription(import_batch,
                                              backend, caller=caller,
                                              repogen=False,
                                              strict=self.strict)
        importer.run()
        backend.commit()
        self.regen = True
    return failed_packages
def upload_updates(self, notices):
    """Convert updateinfo notices into Erratum objects and import the new
    ones into the channel.

    :param notices: iterable of updateinfo notice dicts (yum-style)
    """
    batch = []
    # Map updateinfo advisory types onto the DB's three advisory types.
    typemap = {
        'security': 'Security Advisory',
        'recommended': 'Bug Fix Advisory',
        'bugfix': 'Bug Fix Advisory',
        'optional': 'Product Enhancement Advisory',
        'feature': 'Product Enhancement Advisory',
        'enhancement': 'Product Enhancement Advisory'
    }
    channel_advisory_names = self.list_errata()
    for notice in notices:
        notice = self.fix_notice(notice)
        # Skip advisories already present in the channel unless forced.
        if not self.force_all_errata and notice['update_id'] in channel_advisory_names:
            continue

        advisory = notice['update_id'] + '-' + notice['version']
        existing_errata = self.get_errata(notice['update_id'])
        e = Erratum()
        e['errata_from'] = notice['from']
        e['advisory'] = advisory
        e['advisory_name'] = notice['update_id']
        e['advisory_rel'] = notice['version']
        e['advisory_type'] = typemap.get(notice['type'],
                                         'Product Enhancement Advisory')
        e['product'] = notice['release'] or 'Unknown'
        e['description'] = notice['description']
        e['synopsis'] = notice['title'] or notice['update_id']
        # Prefix security advisories with their severity once.
        if (notice['type'] == 'security' and notice['severity'] and
                not e['synopsis'].startswith(notice['severity'] + ': ')):
            e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
        if 'summary' in notice and not notice['summary'] is None:
            e['topic'] = notice['summary']
        else:
            e['topic'] = ' '
        if 'solution' in notice and not notice['solution'] is None:
            e['solution'] = notice['solution']
        else:
            e['solution'] = ' '
        e['issue_date'] = self._to_db_date(notice['issued'])
        if notice['updated']:
            e['update_date'] = self._to_db_date(notice['updated'])
        else:
            e['update_date'] = self._to_db_date(notice['issued'])
        e['org_id'] = self.org_id
        e['notes'] = ''
        e['channels'] = []
        e['packages'] = []
        e['files'] = []
        # Merge channels/packages from a previously imported revision.
        if existing_errata:
            e['channels'] = existing_errata['channels']
            e['packages'] = existing_errata['packages']
        e['channels'].append({'label': self.channel_label})

        for pkg in notice['pkglist'][0]['packages']:
            param_dict = {
                'name': pkg['name'],
                'version': pkg['version'],
                'release': pkg['release'],
                'arch': pkg['arch'],
                'channel_id': int(self.channel['id']),
            }
            # Epoch handling: '0' matches NULL or '0'; empty means NULL;
            # anything else is bound as a query parameter.
            if pkg['epoch'] == '0':
                epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
            elif pkg['epoch'] is None or pkg['epoch'] == '':
                epochStatement = "pevr.epoch is NULL"
            else:
                epochStatement = "pevr.epoch = :epoch"
                param_dict['epoch'] = pkg['epoch']
            if self.org_id:
                param_dict['org_id'] = self.org_id
                orgStatement = "= :org_id"
            else:
                orgStatement = "is NULL"

            # Resolve the package id and checksum within this channel.
            h = rhnSQL.prepare("""
                select p.id, pevr.epoch, c.checksum, c.checksum_type
                from rhnPackage p
                join rhnPackagename pn on p.name_id = pn.id
                join rhnpackageevr pevr on p.evr_id = pevr.id
                join rhnpackagearch pa on p.package_arch_id = pa.id
                join rhnArchType at on pa.arch_type_id = at.id
                join rhnChecksumView c on p.checksum_id = c.id
                join rhnChannelPackage cp on p.id = cp.package_id
                where pn.name = :name
                and p.org_id %s
                and pevr.version = :version
                and pevr.release = :release
                and pa.label = :arch
                and %s
                and at.label = 'rpm'
                and cp.channel_id = :channel_id
            """ % (orgStatement, epochStatement))
            h.execute(**param_dict)
            cs = h.fetchone_dict() or None

            if not cs:
                if 'epoch' in param_dict:
                    epoch = param_dict['epoch'] + ":"
                else:
                    epoch = ""
                log(2, "No checksum found for %s-%s%s-%s.%s."
                       " Skipping Package" % (param_dict['name'],
                                              epoch,
                                              param_dict['version'],
                                              param_dict['release'],
                                              param_dict['arch']))
                continue

            # Rebuild the package list, dropping any older entry with the
            # same package id before appending the fresh one.
            newpkgs = []
            for oldpkg in e['packages']:
                if oldpkg['package_id'] != cs['id']:
                    newpkgs.append(oldpkg)

            package = IncompletePackage().populate(pkg)
            package['epoch'] = cs['epoch']
            package['org_id'] = self.org_id

            package['checksums'] = {cs['checksum_type']: cs['checksum']}
            package['checksum_type'] = cs['checksum_type']
            package['checksum'] = cs['checksum']

            package['package_id'] = cs['id']
            newpkgs.append(package)

            e['packages'] = newpkgs

        if len(e['packages']) == 0:
            # FIXME: print only with higher debug option
            log(2, "Advisory %s has empty package list." % e['advisory_name'])

        e['keywords'] = []
        if notice['reboot_suggested']:
            kw = Keyword()
            kw.populate({'keyword': 'reboot_suggested'})
            e['keywords'].append(kw)
        if notice['restart_suggested']:
            kw = Keyword()
            kw.populate({'keyword': 'restart_suggested'})
            e['keywords'].append(kw)
        e['bugs'] = []
        e['cve'] = []
        if notice['references']:
            bzs = [r for r in notice['references']
                   if r['type'] == 'bugzilla']
            if len(bzs):
                # tmp de-duplicates bugzilla ids
                tmp = {}
                for bz in bzs:
                    try:
                        bz_id = int(bz['id'])
                    # This can happen in some incorrectly generated
                    # updateinfo, let's be smart
                    except ValueError:
                        log(2, "Bugzilla assigned to advisory %s has invalid id: %s, trying to get it from URL..."
                            % (e['advisory_name'], bz['id']))
                        bz_id = int(re.search(r"\d+$", bz['href']).group(0))
                    if bz_id not in tmp:
                        bug = Bug()
                        bug.populate({'bug_id': bz_id,
                                      'summary': bz['title'],
                                      'href': bz['href']})
                        e['bugs'].append(bug)
                        tmp[bz_id] = None
            cves = [r for r in notice['references'] if r['type'] == 'cve']
            if len(cves):
                # tmp de-duplicates CVE ids
                tmp = {}
                for cve in cves:
                    if cve['id'] not in tmp:
                        e['cve'].append(cve['id'])
                        tmp[cve['id']] = None
            others = [r for r in notice['references']
                      if not r['type'] == 'bugzilla' and not r['type'] == 'cve']
            if len(others):
                tmp = len(others)
                # Remaining references are joined into a newline-separated
                # free-text field.
                refers_to = ""
                for other in others:
                    if refers_to:
                        refers_to += "\n"
                    refers_to += other['href']
                e['refers_to'] = refers_to
        e['locally_modified'] = None
        batch.append(e)

    if batch:
        log(0, "Syncing %s new errata to channel." % len(batch))
        backend = SQLBackend()
        importer = ErrataImport(batch, backend)
        importer.run()
        self.regen = True
    elif notices:
        log(0, "No new errata to sync.")
def push_package(a_pkg, org_id=None, force=None, channels=None, relative_path=None):
    """Upload a single package file and import its metadata into the database.

    Moves the payload to its final location under CFG.MOUNT_POINT, runs the
    package importer, and (for binary packages) processes key associations
    and schedules an errata-cache refresh.

    :param a_pkg: uploaded package object exposing header, payload_stream,
                  payload_size, checksum/checksum_type and header offsets
    :param org_id: owning organization id, or None for a NULL-org package
    :param force: when true, import with upload force level 4 even if the
                  package differs from one already known
    :param channels: optional list of channels to associate the package with
    :param relative_path: package path relative to CFG.MOUNT_POINT
    :return: tuple (diff_dict, diff_level); ({}, 0) on success
    :raises rhnFault: code 50 on any filesystem error while storing the payload
    """
    if channels is None:
        # Fix: the original used a mutable default ("channels=[]"), which is
        # shared across calls; None-sentinel keeps the interface compatible.
        channels = []

    # First write the package to the filesystem to final location
    try:
        importLib.move_package(a_pkg.payload_stream.name,
                               basedir=CFG.MOUNT_POINT,
                               relpath=relative_path,
                               checksum_type=a_pkg.checksum_type,
                               checksum=a_pkg.checksum,
                               force=1)
    except OSError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, "Package upload failed: %s" % e),
                      sys.exc_info()[2])
    except importLib.FileConflictError:
        raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
    except:
        # Deliberate catch-all: any other failure is reported as a generic
        # file error fault to the caller.
        raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

    pkg = mpmSource.create_package(a_pkg.header,
                                   size=a_pkg.payload_size,
                                   checksum_type=a_pkg.checksum_type,
                                   checksum=a_pkg.checksum,
                                   relpath=relative_path,
                                   org_id=org_id,
                                   header_start=a_pkg.header_start,
                                   header_end=a_pkg.header_end,
                                   channels=channels)

    batch = importLib.Collection()
    batch.append(pkg)

    backend = SQLBackend()

    if force:
        upload_force = 4
    else:
        upload_force = 0

    importer = packageImport.packageImporter(batch, backend,
                                             source=a_pkg.header.is_source,
                                             caller="server.app.uploadPackage")
    importer.setUploadForce(upload_force)
    importer.run()

    package = batch[0]
    log_debug(5, "Package diff", package.diff)

    if package.diff and not force and package.diff.level > 1:
        # Packages too different; bail out
        log_debug(1, "Packages too different", package.toDict(),
                  "Level:", package.diff.level)
        pdict = package.toDict()
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(4, "Original package", orig_path)

        # MPMs do not store their headers on disk, so we must avoid performing
        # operations which rely on information only contained in the headers
        # (such as header signatures).
        if os.path.exists(orig_path) and a_pkg.header.packaging != 'mpm':
            oh = rhn_pkg.get_package_header(orig_path)
            _diff_header_sigs(a_pkg.header, oh, pdict['diff']['diff'])

        return pdict, package.diff.level

    # Remove any pending scheduled file deletion for this package
    h = rhnSQL.prepare("""
        delete from rhnPackageFileDeleteQueue where path = :path
    """)
    h.execute(path=relative_path)

    if package.diff and not force and package.diff.level:
        # No need to copy it - just the path is modified
        # pkilambi bug#180347
        # case 1:check if the path exists in the db and also on the file system.
        # if it does then no need to copy
        # case2: file exists on file system but path not in db.then add the
        # realtive path in the db based on checksum of the pkg
        # case3: if no file on file system but path exists.then we write the
        # file to file system
        # case4:no file exists on FS and no path in db .then we write both.
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(3, "Original package", orig_path)

        # check included to query for source and binary rpms
        h_path_sql = """
            select ps.path path
              from %s ps,
                   rhnChecksumView c
             where c.checksum = :csum
               and c.checksum_type = :ctype
               and ps.checksum_id = c.id
               and (ps.org_id = :org_id or
                   (ps.org_id is null and :org_id is null)
                   )
        """
        if a_pkg.header.is_source:
            h_package_table = 'rhnPackageSource'
        else:
            h_package_table = 'rhnPackage'
        h_path = rhnSQL.prepare(h_path_sql % h_package_table)
        h_path.execute(ctype=a_pkg.checksum_type, csum=a_pkg.checksum,
                       org_id=org_id)

        rs_path = h_path.fetchall_dict()
        path_dict = {}
        if rs_path:
            path_dict = rs_path[0]

        # Fix: use .get() so an empty result set (path_dict == {}) falls
        # through to the update branch instead of raising KeyError.
        if os.path.exists(orig_path) and path_dict.get('path'):
            # Case 1: file on disk and path in DB - nothing to do.
            return {}, 0
        elif not path_dict.get('path'):
            # Case 2/4: record the relative path in the DB, keyed by checksum.
            h_upd = rhnSQL.prepare("""
                update rhnpackage
                   set path = :path
                 where checksum_id = (
                           select id from rhnChecksumView c
                            where c.checksum = :csum
                              and c.checksum_type = :ctype)
            """)
            h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type,
                          csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()

    if not a_pkg.header.is_source:
        # Merged: the original tested "is_source" twice in a row for these
        # two post-import steps; one guard is behaviorally identical.
        server_packages.processPackageKeyAssociations(a_pkg.header,
                                                      a_pkg.checksum_type,
                                                      a_pkg.checksum)
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0
except KeyboardInterrupt: finally_remove(localpath) raise except Exception, e: self.error_msg(e) finally_remove(localpath) if self.fail: raise to_process[index] = (pack, False, False) continue self.print_msg("Linking packages to channel.") import_batch = [self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link] backend = SQLBackend() caller = "server.app.yumreposync" importer = ChannelPackageSubscription(import_batch, backend, caller=caller, repogen=False) importer.run() backend.commit() @staticmethod def match_package_checksum(abspath, checksum_type, checksum): if (os.path.exists(abspath) and getFileChecksum(checksum_type, filename=abspath) == checksum): return 1 return 0 def associate_package(self, pack): package = {}
def import_packages(self, plug, source_id, url):
    """Synchronize packages from one repository into self.channel.

    Builds the filter list (per-source DB filters when no global filters are
    set), asks the repo plugin for the package list, downloads and uploads
    whatever is missing, then links the new packages to the channel.

    :param plug: repo plugin; provides list_packages(), get_package(),
                 num_packages and num_excluded -- confirm against plugin API
    :param source_id: content source DB id used to look up per-source filters
    :param url: repository URL; only inspected to decide whether downloaded
                files are temporary copies that must be removed afterwards
    """
    if (not self.filters) and source_id:
        # No global filters configured -> load per-source filters from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
              from rhnContentSourceFilter
             where source_id = :source_id
             order by sort_order
        """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        # Each row expands to (flag, [tokens]); tokens split on commas/whitespace.
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    to_process = []
    num_passed = len(packages)
    self.print_msg("Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        self.print_msg("Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # Fix: guard against db_pack being None (package unknown to the DB);
        # the original dereferenced db_pack['path'] unconditionally and
        # crashed for packages not present in the database.
        if db_pack and db_pack['path']:
            pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            if self.match_package_checksum(pack.path,
                                           pack.checksum_type, pack.checksum):
                # package is already on disk
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        self.print_msg("No new packages to sync.")
        return
    else:
        self.print_msg("Packages already synced: %5d" %
                       (num_passed - num_to_process))
        self.print_msg("Packages to sync: %5d" % num_to_process)

    self.regen = True
    # file:/ URLs point at a local repo whose files must not be deleted.
    is_non_local_repo = (url.find("file:/") < 0)

    def finally_remove(path):
        # Delete a temporary download, but never files of a local repo.
        if is_non_local_repo and path and os.path.exists(path):
            os.remove(path)

    # try/except/finally doesn't work in python 2.4 (RHEL5), so here's a hack
    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            self.print_msg("%d/%d : %s" % (index + 1, num_to_process,
                                           pack.getNVREA()))
            if to_download:
                # Merged: the original had two consecutive identical
                # "if to_download:" blocks; nothing in between changes the
                # flag, so one block is behaviorally identical.
                pack.path = localpath = plug.get_package(pack)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel)
            finally_remove(localpath)
        except KeyboardInterrupt:
            finally_remove(localpath)
            raise
        except Exception:
            e = sys.exc_info()[1]
            self.error_msg(e)
            finally_remove(localpath)
            if self.fail:
                raise
            # Failed package: mark as neither downloaded nor linkable and
            # keep processing the rest of the batch.
            to_process[index] = (pack, False, False)
            continue

    self.print_msg("Linking packages to channel.")
    import_batch = [self.associate_package(pack)
                    for (pack, to_download, to_link) in to_process
                    if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch,
                                          backend, caller=caller, repogen=False)
    importer.run()
    backend.commit()
finally_remove(localpath) raise except Exception, e: self.error_msg(e) finally_remove(localpath) if self.fail: raise to_process[index] = (pack, False, False) continue self.print_msg("Linking packages to channel.") import_batch = [ self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link ] backend = SQLBackend() caller = "server.app.yumreposync" importer = ChannelPackageSubscription(import_batch, backend, caller=caller, repogen=False) importer.run() backend.commit() @staticmethod def match_package_checksum(abspath, checksum_type, checksum): if (os.path.exists(abspath) and getFileChecksum( checksum_type, filename=abspath) == checksum): return 1 return 0
def import_packages(self, plug, source_id, url):
    """Synchronize packages from one repository into self.channel.

    Builds the filter list (per-source DB filters when no global filters are
    set), asks the repo plugin for the package list, downloads/uploads what
    is missing (unless metadata_only), and links packages to the channel.
    In strict mode the channel is made to mirror all repos seen so far.

    :param plug: repo plugin; presumably provides list_packages(),
                 get_package(), num_packages, num_excluded -- confirm
                 against the plugin API
    :param source_id: content source DB id used to look up per-source filters
    :param url: repository URL; only inspected to decide whether downloaded
                files are temporary copies that must be removed afterwards
    """
    if (not self.filters) and source_id:
        # No global filters configured -> load per-source filters from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
            from rhnContentSourceFilter
            where source_id = :source_id
            order by sort_order
        """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        # Each row expands to (flag, [tokens]); tokens split on commas/whitespace.
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # Accumulate across repos; used below when self.strict links everything.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(pack.path,
                                                                 pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

            # just pass data from DB, they will be used if there is no RPM available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    self.regen = True
    # file:/ URLs point at a local repo; those files must not be deleted.
    is_non_local_repo = (url.find("file:/") < 0)

    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" % (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack,
                                                         metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            # Failed package: mark as neither downloaded nor linkable and
            # continue with the rest of the batch.
            to_process[index] = (pack, False, False)
            continue
        finally:
            # Always clean up the temporary download, success or failure.
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)

    log(0, "Linking packages to channel.")
    if self.strict:
        # Strict mode: channel mirrors the repos exactly, so link every
        # package seen so far, not only newly processed ones.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch,
                                          backend, caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
except OSError, e: raise rhnFault(50, "Package upload failed: %s" % e), None, sys.exc_info()[2] except importLib.FileConflictError: raise rhnFault(50, "File already exists"), None, sys.exc_info()[2] except: raise rhnFault(50, "File error"), None, sys.exc_info()[2] pkg = mpmSource.create_package(a_pkg.header, size=a_pkg.payload_size, checksum_type=a_pkg.checksum_type, checksum=a_pkg.checksum, relpath=relative_path, org_id=org_id, header_start=a_pkg.header_start, header_end=a_pkg.header_end, channels=channels) batch = importLib.Collection() batch.append(pkg) backend = SQLBackend() if force: upload_force = 4 else: upload_force = 0 importer = packageImport.packageImporter(batch, backend, source=a_pkg.header.is_source, caller="server.app.uploadPackage") importer.setUploadForce(upload_force) importer.run() package = batch[0] log_debug(5, "Package diff", package.diff) if package.diff and not force and package.diff.level > 1: # Packages too different; bail out
def import_packages(self, plug, source_id, url):
    """Synchronize packages from one repository into self.channel.

    Like the other variants in this file: builds filters, lists packages
    via the repo plugin, downloads/uploads what is missing (unless
    metadata_only), then links packages to the channel. Additionally
    normalizes self.channel['org_id'] to int or None before DB lookups.

    :param plug: repo plugin; presumably provides list_packages(),
                 get_package(), num_packages, num_excluded -- confirm
                 against the plugin API
    :param source_id: content source DB id used to look up per-source filters
    :param url: repository URL; only inspected to decide whether downloaded
                files are temporary copies that must be removed afterwards
    """
    if (not self.filters) and source_id:
        # No global filters configured -> load per-source filters from the DB.
        h = rhnSQL.prepare("""
            select flag, filter
            from rhnContentSourceFilter
            where source_id = :source_id
            order by sort_order
        """)
        h.execute(source_id=source_id)
        filter_data = h.fetchall_dict() or []
        # Each row expands to (flag, [tokens]); tokens split on commas/whitespace.
        filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                   for row in filter_data]
    else:
        filters = self.filters

    packages = plug.list_packages(filters, self.latest)
    # Accumulate across repos; used below when self.strict links everything.
    self.all_packages.extend(packages)
    to_process = []
    num_passed = len(packages)
    log(0, "Packages in repo: %5d" % plug.num_packages)
    if plug.num_excluded:
        log(0, "Packages passed filter rules: %5d" % num_passed)
    channel_id = int(self.channel['id'])
    # Normalize org_id: non-empty -> int, otherwise None (NULL org).
    if self.channel['org_id']:
        self.channel['org_id'] = int(self.channel['org_id'])
    else:
        self.channel['org_id'] = None
    for pack in packages:
        db_pack = rhnPackage.get_info_for_package(
            [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
            channel_id, self.channel['org_id'])

        to_download = True
        to_link = True
        # Package exists in DB
        if db_pack:
            # Path in filesystem is defined
            if db_pack['path']:
                pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
            else:
                pack.path = ""

            if self.metadata_only or self.match_package_checksum(pack.path,
                                                                 pack.checksum_type, pack.checksum):
                # package is already on disk or not required
                to_download = False
                if db_pack['channel_id'] == channel_id:
                    # package is already in the channel
                    to_link = False
            elif db_pack['channel_id'] == channel_id:
                # different package with SAME NVREA
                self.disassociate_package(db_pack)

            # just pass data from DB, they will be used if there is no RPM available
            pack.checksum = db_pack['checksum']
            pack.checksum_type = db_pack['checksum_type']
            pack.epoch = db_pack['epoch']

        if to_download or to_link:
            to_process.append((pack, to_download, to_link))

    num_to_process = len(to_process)
    if num_to_process == 0:
        log(0, "No new packages to sync.")
        # If we are just appending, we can exit
        if not self.strict:
            return
    else:
        log(0, "Packages already synced: %5d" % (num_passed - num_to_process))
        log(0, "Packages to sync: %5d" % num_to_process)

    self.regen = True
    # file:/ URLs point at a local repo; those files must not be deleted.
    is_non_local_repo = (url.find("file:/") < 0)

    for (index, what) in enumerate(to_process):
        pack, to_download, to_link = what
        localpath = None
        # pylint: disable=W0703
        try:
            log(0, "%d/%d : %s" % (index + 1, num_to_process, pack.getNVREA()))
            if to_download:
                pack.path = localpath = plug.get_package(pack,
                                                         metadata_only=self.metadata_only)
                pack.load_checksum_from_header()
                pack.upload_package(self.channel,
                                    metadata_only=self.metadata_only)
        except KeyboardInterrupt:
            raise
        except Exception:
            e = sys.exc_info()[1]
            log2stderr(0, e)
            if self.fail:
                raise
            # Failed package: mark as neither downloaded nor linkable and
            # continue with the rest of the batch.
            to_process[index] = (pack, False, False)
            continue
        finally:
            # Always clean up the temporary download, success or failure.
            if is_non_local_repo and localpath and os.path.exists(localpath):
                os.remove(localpath)

    log(0, "Linking packages to channel.")
    if self.strict:
        # Strict mode: channel mirrors the repos exactly, so link every
        # package seen so far, not only newly processed ones.
        import_batch = [self.associate_package(pack)
                        for pack in self.all_packages]
    else:
        import_batch = [self.associate_package(pack)
                        for (pack, to_download, to_link) in to_process
                        if to_link]
    backend = SQLBackend()
    caller = "server.app.yumreposync"
    importer = ChannelPackageSubscription(import_batch,
                                          backend, caller=caller, repogen=False,
                                          strict=self.strict)
    importer.run()
    backend.commit()
def upload_updates(self, notices):
    """Import updateinfo notices into the channel as errata.

    For each notice, builds an Erratum object, resolves each listed package
    to a package already present in the channel (by NVREA + checksum), and
    imports the resulting batch via ErrataImport. Notices whose package
    lists resolve to nothing are skipped and counted.

    :param notices: iterable of updateinfo notice dicts (repo-metadata
                    shaped; keys like 'update_id', 'type', 'pkglist',
                    'references' -- confirm against the repo plugin)
    """
    batch = []
    skipped_updates = 0
    # Map notice type to the advisory type label stored in the DB.
    typemap = {
        'security': 'Security Advisory',
        'recommended': 'Bug Fix Advisory',
        'bugfix': 'Bug Fix Advisory',
        'optional': 'Product Enhancement Advisory',
        'feature': 'Product Enhancement Advisory',
        'enhancement': 'Product Enhancement Advisory'
    }
    for notice in notices:
        notice = self.fix_notice(notice)
        existing_errata = self.get_errata(notice['update_id'])

        e = Erratum()
        e['errata_from'] = notice['from']
        e['advisory'] = notice['update_id']
        e['advisory_name'] = notice['update_id']
        e['advisory_rel'] = notice['version']
        e['advisory_type'] = typemap.get(notice['type'],
                                         'Product Enhancement Advisory')
        e['product'] = notice['release'] or 'Unknown'
        e['description'] = notice['description']
        e['synopsis'] = notice['title'] or notice['update_id']
        # Prefix security advisories with their severity unless already there.
        if (notice['type'] == 'security' and notice['severity'] and
                not e['synopsis'].startswith(notice['severity'] + ': ')):
            e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
        if 'summary' in notice and not notice['summary'] is None:
            e['topic'] = notice['summary']
        else:
            e['topic'] = ' '
        if 'solution' in notice and not notice['solution'] is None:
            e['solution'] = notice['solution']
        else:
            e['solution'] = ' '
        e['issue_date'] = self._to_db_date(notice['issued'])
        if notice['updated']:
            e['update_date'] = self._to_db_date(notice['updated'])
        else:
            e['update_date'] = self._to_db_date(notice['issued'])
        e['org_id'] = self.channel['org_id']
        e['notes'] = ''
        e['channels'] = []
        e['packages'] = []
        e['files'] = []
        if existing_errata:
            # Preserve channel/package associations of an already-known erratum.
            e['channels'] = existing_errata['channels']
            e['packages'] = existing_errata['packages']
        e['channels'].append({'label': self.channel_label})

        for pkg in notice['pkglist'][0]['packages']:
            param_dict = {
                'name': pkg['name'],
                'version': pkg['version'],
                'release': pkg['release'],
                'arch': pkg['arch'],
                'channel_id': int(self.channel['id']),
            }
            # Epoch handling: '0' matches NULL or '0'; empty/None matches
            # NULL; anything else is matched exactly via a bind variable.
            if pkg['epoch'] == '0':
                epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
            elif pkg['epoch'] is None or pkg['epoch'] == '':
                epochStatement = "pevr.epoch is NULL"
            else:
                epochStatement = "pevr.epoch = :epoch"
                param_dict['epoch'] = pkg['epoch']
            if self.channel['org_id']:
                param_dict['org_id'] = self.channel['org_id']
                orgStatement = "= :org_id"
            else:
                orgStatement = "is NULL"

            h = rhnSQL.prepare("""
                select p.id, pevr.epoch, c.checksum, c.checksum_type
                  from rhnPackage p
                  join rhnPackagename pn on p.name_id = pn.id
                  join rhnpackageevr pevr on p.evr_id = pevr.id
                  join rhnpackagearch pa on p.package_arch_id = pa.id
                  join rhnArchType at on pa.arch_type_id = at.id
                  join rhnChecksumView c on p.checksum_id = c.id
                  join rhnChannelPackage cp on p.id = cp.package_id
                 where pn.name = :name
                   and p.org_id %s
                   and pevr.version = :version
                   and pevr.release = :release
                   and pa.label = :arch
                   and %s
                   and at.label = 'rpm'
                   and cp.channel_id = :channel_id
            """ % (orgStatement, epochStatement))
            h.execute(**param_dict)
            cs = h.fetchone_dict() or None

            if not cs:
                # Fix: param_dict.has_key() is Python-2-only (removed in
                # Python 3); the 'in' operator works in both.
                if 'epoch' in param_dict:
                    epoch = param_dict['epoch'] + ":"
                else:
                    epoch = ""
                log_debug(1, "No checksum found for %s-%s%s-%s.%s."
                             " Skipping Package" % (param_dict['name'],
                                                    epoch,
                                                    param_dict['version'],
                                                    param_dict['release'],
                                                    param_dict['arch']))
                continue

            # Replace any existing entry for the same package id.
            newpkgs = []
            for oldpkg in e['packages']:
                if oldpkg['package_id'] != cs['id']:
                    newpkgs.append(oldpkg)

            package = IncompletePackage().populate(pkg)
            package['epoch'] = cs['epoch']
            package['org_id'] = self.channel['org_id']
            package['checksums'] = {cs['checksum_type']: cs['checksum']}
            package['checksum_type'] = cs['checksum_type']
            package['checksum'] = cs['checksum']
            package['package_id'] = cs['id']
            newpkgs.append(package)
            e['packages'] = newpkgs

        if len(e['packages']) == 0:
            # Nothing resolvable in the channel -> skip this notice.
            skipped_updates += 1
            continue

        e['keywords'] = []
        if notice['reboot_suggested']:
            kw = Keyword()
            kw.populate({'keyword': 'reboot_suggested'})
            e['keywords'].append(kw)
        if notice['restart_suggested']:
            kw = Keyword()
            kw.populate({'keyword': 'restart_suggested'})
            e['keywords'].append(kw)
        e['bugs'] = []
        e['cve'] = []
        if notice['references']:
            bzs = [r for r in notice['references'] if r['type'] == 'bugzilla']
            if len(bzs):
                # De-duplicate bugzilla references by id.
                tmp = {}
                for bz in bzs:
                    if bz['id'] not in tmp:
                        bug = Bug()
                        bug.populate({'bug_id': bz['id'],
                                      'summary': bz['title'],
                                      'href': bz['href']})
                        e['bugs'].append(bug)
                        tmp[bz['id']] = None
            cves = [r for r in notice['references'] if r['type'] == 'cve']
            if len(cves):
                # De-duplicate CVE ids.
                tmp = {}
                for cve in cves:
                    if cve['id'] not in tmp:
                        e['cve'].append(cve['id'])
                        tmp[cve['id']] = None
            others = [r for r in notice['references']
                      if not r['type'] == 'bugzilla' and not r['type'] == 'cve']
            if len(others):
                # (Removed dead local "tmp = len(others)" -- it was never read.)
                refers_to = ""
                for other in others:
                    if refers_to:
                        refers_to += "\n"
                    refers_to += other['href']
                e['refers_to'] = refers_to
        e['locally_modified'] = None
        batch.append(e)

    if skipped_updates > 0:
        self.print_msg("%d errata skipped because of empty package list."
                       % skipped_updates)
    backend = SQLBackend()
    importer = ErrataImport(batch, backend)
    importer.run()
    self.regen = True
def print_channel_tree(self, repos=False):
    """Print the tree of available CDN channels with their sync status.

    Legend: 'p' = previously imported/synced, '.' = not yet synced,
    '?' = package count unavailable (no CDN source).

    :param repos: when True, also print each channel's CDN source URLs
    """
    available_channel_tree = self._list_available_channels()
    backend = SQLBackend()

    if not available_channel_tree:
        log2stderr(0, "No available channels were found. Is your %s activated for CDN?"
                   % PRODUCT_NAME)
        return

    log(0, "p = previously imported/synced channel")
    log(0, ". = channel not yet imported/synced")
    log(0, "? = No CDN source provided to count number of packages")

    log(0, "Base channels:")
    for channel in sorted(available_channel_tree):
        # Base and child channels print identically; shared helper below.
        self._print_channel_status_line(channel, backend, repos)

    # print information about child channels
    for channel in sorted(available_channel_tree):
        # Print only if there are any child channels
        if len(available_channel_tree[channel]) > 0:
            log(0, "%s:" % channel)
            for child in sorted(available_channel_tree[channel]):
                self._print_channel_status_line(child, backend, repos)

def _print_channel_status_line(self, channel, backend, repos):
    """Log one channel row (status flag, label, package count) and,
    when repos is True, its CDN source URLs. Extracted from
    print_channel_tree, which duplicated this logic for base and
    child channels."""
    if channel in self.synced_channels:
        status = 'p'
    else:
        status = '.'
    sources = self._get_content_sources(channel, backend)
    if sources:
        packages_number = '0'
    else:
        packages_number = '?'
    try:
        # Fix: the original used open(...).read() and leaked the file
        # handle; 'with' guarantees it is closed.
        with open(constants.CDN_REPODATA_ROOT + '/' + channel
                  + "/packages_num", 'r') as f:
            packages_number = f.read()
    # pylint: disable=W0703
    except Exception:
        # Best-effort: missing/unreadable counter file keeps the default.
        pass
    log(0, "    %s %s %s" % (status, channel, packages_number))
    if repos:
        if sources:
            for source in sources:
                log(0, "        %s" % source['source_url'])
        else:
            log(0, "        No CDN source provided!")